Compare commits

develop-20 ... develop-20 (616 commits)
.github/dependabot.yml (5 changed lines)

@@ -10,3 +10,8 @@ updates:
     directory: "/lib/spack/docs"
     schedule:
       interval: "daily"
+  # Requirements to run style checks
+  - package-ecosystem: "pip"
+    directory: "/.github/workflows/style"
+    schedule:
+      interval: "daily"
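Read together, the hunk grows the ``updates`` list to cover the new style-requirements directory. A hedged sketch of the resulting file is below; ``version: 2`` and the first entry's ``package-ecosystem`` line fall outside the hunk and are assumptions:

```yaml
# Hedged sketch of .github/dependabot.yml after this hunk; the top of the
# file is not shown in the diff, so those lines are assumptions.
version: 2                              # assumed, not shown in the hunk
updates:
  - package-ecosystem: "pip"            # assumed, not shown in the hunk
    directory: "/lib/spack/docs"
    schedule:
      interval: "daily"
  # Requirements to run style checks
  - package-ecosystem: "pip"
    directory: "/.github/workflows/style"
    schedule:
      interval: "daily"
```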
.github/workflows/audit.yaml (6 changed lines)

@@ -22,8 +22,8 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
@@ -34,6 +34,7 @@ jobs:
         run: |
           . share/spack/setup-env.sh
           coverage run $(which spack) audit packages
+          coverage run $(which spack) audit externals
           coverage combine
           coverage xml
       - name: Package audits (without coverage)
@@ -41,6 +42,7 @@ jobs:
         run: |
           . share/spack/setup-env.sh
           $(which spack) audit packages
+          $(which spack) audit externals
       - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
         if: ${{ inputs.with_coverage == 'true' }}
         with:
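With the added lines, both audit jobs now check externals as well as packages. A hedged sketch of the full coverage step after the change (the step name is an assumption; only the ``run`` body appears in the hunk):

```yaml
# Hedged sketch of the coverage-enabled audit step after this change.
- name: Package audits (with coverage)   # step name is an assumption
  run: |
    . share/spack/setup-env.sh
    coverage run $(which spack) audit packages
    coverage run $(which spack) audit externals
    coverage combine
    coverage xml
```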
.github/workflows/bootstrap.yml (40 changed lines)

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -42,8 +42,8 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -80,8 +80,8 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -145,8 +145,8 @@ jobs:
       - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -158,13 +158,13 @@ jobs:
        run: |
          brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
@@ -179,11 +179,11 @@ jobs:
        run: |
          brew install tree
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - name: Bootstrap clingo
        run: |
          set -ex
-         for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+         for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
       - name: Bootstrap clingo
        run: |
          set -ex
-         for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+         for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -265,6 +265,7 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.4
          spack bootstrap disable spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
@@ -283,7 +284,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
             gawk
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -302,8 +303,8 @@ jobs:
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

@@ -316,10 +317,11 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
+         spack bootstrap disable github-actions-v0.4
          spack bootstrap disable spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
@@ -333,13 +335,13 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
+         spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
-         spack bootstrap disable github-actions-v0.3
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
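The pattern repeated across these hunks: jobs that exercise source bootstrapping now disable both binary mirrors (v0.5 and v0.4) so that clingo must be rebuilt from sources. A minimal sketch of such a step, with an illustrative step name; all commands appear in the hunks above:

```yaml
# Hedged sketch: force clingo to bootstrap from sources by disabling the
# v0.5 and v0.4 binary mirrors before solving.
- name: Bootstrap clingo from sources
  run: |
    source share/spack/setup-env.sh
    spack bootstrap disable github-actions-v0.5
    spack bootstrap disable github-actions-v0.4
    spack external find cmake bison
    spack -d solve zlib
```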
.github/workflows/build-containers.yml (12 changed lines)

@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -92,13 +92,13 @@ jobs:
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1
+        uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
+        uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (2 changed lines)

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/nightly-win-builds.yml (4 changed lines)

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: 3.9
      - name: Install Python packages
.github/workflows/style/requirements.txt (new file, 7 lines)

@@ -0,0 +1,7 @@
+black==23.9.1
+clingo==5.6.2
+flake8==6.1.0
+isort==5.12.0
+mypy==1.6.1
+types-six==1.16.21.9
+vermin==1.5.2
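The style workflows below switch to installing from this pinned file rather than listing tool versions inline; a sketch of the consuming step, mirroring the valid-style.yml hunks further down:

```yaml
# Sketch: install the pinned style toolchain in CI; both commands appear
# verbatim in the valid-style.yml diff below.
- name: Install Python packages
  run: |
    pip install --upgrade pip setuptools
    pip install -r .github/workflows/style/requirements.txt
```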
.github/workflows/unit_tests.yaml (26 changed lines)

@@ -15,7 +15,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
        concretizer: ['clingo']
        on_develop:
          - ${{ github.ref == 'refs/heads/develop' }}
@@ -45,12 +45,16 @@ jobs:
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
+        - python-version: '3.11'
+          os: ubuntu-latest
+          concretizer: 'clingo'
+          on_develop: false

     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -94,10 +98,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -133,7 +137,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -152,10 +156,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -185,12 +189,12 @@ jobs:
     runs-on: macos-latest
     strategy:
       matrix:
-        python-version: ["3.10"]
+        python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
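In short: Python 3.12 joins the Linux matrix, a 3.11 include entry (not run on develop) is added, and the macOS job moves from 3.10 to 3.11. A condensed sketch of the resulting Linux matrix, with include/exclude entries omitted:

```yaml
# Sketch of the expanded unit-test matrix after this change; values are
# taken from the hunk above, other matrix keys omitted.
strategy:
  matrix:
    os: [ubuntu-latest]
    python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
    concretizer: ['clingo']
```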
.github/workflows/valid-style.yml (17 changed lines)

@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python Packages
        run: |
-         pip install --upgrade pip
-         pip install --upgrade vermin
+         pip install --upgrade pip setuptools
+         pip install -r .github/workflows/style/requirements.txt
      - name: vermin (Spack's Core)
        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
      - name: vermin (Repositories)
@@ -35,16 +35,17 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python packages
        run: |
-         python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
+         pip install --upgrade pip setuptools
+         pip install -r .github/workflows/style/requirements.txt
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -68,7 +69,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
.github/workflows/windows_python.yml (12 changed lines)

@@ -15,10 +15,10 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -39,10 +39,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -63,10 +63,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+      - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
        with:
          python-version: 3.9
      - name: Install Python packages
CITATION.cff (54 changed lines)

@@ -27,12 +27,53 @@
 # And here's the CITATION.cff format:
 #
 cff-version: 1.2.0
+type: software
 message: "If you are referencing Spack in a publication, please cite the paper below."
+title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+abstract: >-
+  Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
+  Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
+  However, managing many configurations is difficult because the configuration space is combinatorial in size.
+  We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
+  Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
+  It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
+  We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
 preferred-citation:
+  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+  type: conference-paper
   doi: "10.1145/2807591.2807623"
-  url: "https://github.com/spack/spack"
+  url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
+  authors:
+    - family-names: "Gamblin"
+      given-names: "Todd"
+    - family-names: "LeGendre"
+      given-names: "Matthew"
+    - family-names: "Collette"
+      given-names: "Michael R."
+    - family-names: "Lee"
+      given-names: "Gregory L."
+    - family-names: "Moody"
+      given-names: "Adam"
+    - family-names: "de Supinski"
+      given-names: "Bronis R."
+    - family-names: "Futral"
+      given-names: "Scott"
+  conference:
+    name: "Supercomputing 2015 (SC’15)"
+    city: "Austin"
+    region: "Texas"
+    country: "US"
+    date-start: 2015-11-15
+    date-end: 2015-11-20
+  month: 11
+  year: 2015
+  identifiers:
+    - description: "The concept DOI of the work."
+      type: doi
+      value: 10.1145/2807591.2807623
+    - description: "The DOE Document Release Number of the work"
+      type: other
+      value: "LLNL-CONF-669890"
 authors:
   - family-names: "Gamblin"
     given-names: "Todd"
   - family-names: "LeGendre"
@@ -47,12 +88,3 @@ preferred-citation:
     given-names: "Bronis R."
   - family-names: "Futral"
     given-names: "Scott"
-title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
-conference:
-  name: "Supercomputing 2015 (SC’15)"
-  city: "Austin"
-  region: "Texas"
-  country: "USA"
-month: November 15-20
-year: 2015
-notes: LLNL-CONF-669890
README.md

@@ -7,6 +7,7 @@
 [](https://spack.readthedocs.io)
 [](https://github.com/psf/black)
 [](https://slack.spack.io)
+[](https://matrix.to/#/#spack-space:matrix.org)

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
@@ -62,7 +63,10 @@ Resources:

 * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
   To get an invitation, visit [slack.spack.io](https://slack.spack.io).
-* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
+* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
+  [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
+* [**Github Discussions**](https://github.com/spack/spack/discussions):
+  not just for discussions, also Q&A.
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
 * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
etc/spack/defaults/bootstrap.yaml

@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
+  - name: 'github-actions-v0.5'
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.5
   - name: 'github-actions-v0.4'
     metadata: $spack/share/spack/bootstrap/github-actions-v0.4
-  - name: 'github-actions-v0.3'
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.3
   - name: 'spack-install'
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
+    github-actions-v0.5: true
     github-actions-v0.4: true
-    github-actions-v0.3: true
     spack-install: true
etc/spack/defaults/concretizer.yaml

@@ -41,4 +41,4 @@ concretizer:
     # "none": allows a single node for any package in the DAG.
     # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
-    strategy: none
+    strategy: minimal
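Users who prefer the old single-node behavior can override the new default in their own configuration scope; a minimal sketch, assuming a user-level ``concretizer.yaml``:

```yaml
# Hedged sketch (e.g. ~/.spack/concretizer.yaml): restore the pre-v0.21
# behavior; keys mirror etc/spack/defaults/concretizer.yaml above.
concretizer:
  duplicates:
    strategy: none
```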
lib/spack/docs/.gitignore (1 changed line)

@@ -1,4 +1,3 @@
-package_list.html
 command_index.rst
 spack*.rst
 llnl*.rst
lib/spack/docs/basic_usage.rst

@@ -45,7 +45,8 @@ Listing available packages

 To install software with Spack, you need to know what software is
 available. You can see a list of available package names at the
-:ref:`package-list` webpage, or using the ``spack list`` command.
+`packages.spack.io <https://packages.spack.io>`_ website, or
+using the ``spack list`` command.

 .. _cmd-spack-list:

@@ -60,7 +61,7 @@ can install:
    :ellipsis: 10

 There are thousands of them, so we've truncated the output above, but you
-can find a :ref:`full list here <package-list>`.
+can find a `full list here <https://packages.spack.io>`_.
 Packages are listed by name in alphabetical order.
 A pattern to match with no wildcards, ``*`` or ``?``,
 will be treated as though it started and ended with
lib/spack/docs/build_settings.rst

@@ -3,6 +3,103 @@
    SPDX-License-Identifier: (Apache-2.0 OR MIT)

+.. _concretizer-options:
+
+==========================================
+Concretization Settings (concretizer.yaml)
+==========================================
+
+The ``concretizer.yaml`` configuration file allows you to customize aspects of the
+algorithm used to select the dependencies you install. The default configuration
+is the following:
+
+.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
+   :language: yaml
+
+--------------------------------
+Reuse already installed packages
+--------------------------------
+
+The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
+whether it will do a "fresh" installation and prefer the latest settings from
+``package.py`` files and ``packages.yaml`` (``false``).
+You can use:
+
+.. code-block:: console
+
+   % spack install --reuse <spec>
+
+to enable reuse for a single installation, and you can use:
+
+.. code-block:: console
+
+   spack install --fresh <spec>
+
+to do a fresh install if ``reuse`` is enabled by default.
+``reuse: true`` is the default.
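The same switch is available declaratively; a minimal sketch, assuming a user- or site-scope ``concretizer.yaml``:

```yaml
# Hedged sketch: make "fresh" concretization the default by disabling
# reuse; the key is documented in the passage above.
concretizer:
  reuse: false
```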
+------------------------------------------
+Selection of the target microarchitectures
+------------------------------------------
+
+The options under the ``targets`` attribute control which targets are considered during a solve.
+Currently the options in this section are only configurable from the ``concretizer.yaml`` file
+and there are no corresponding command line arguments to enable them for a single solve.
+
+The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
+If set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: microarchitectures
+
+Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
+compatibility. If instead the option is set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: generic
+
+Spack will consider only generic microarchitectures. For instance, when running on a
+Haswell node, Spack will consider ``haswell`` as the best target in the former case and
+``x86_64_v3`` as the best target in the latter case.
+
+The ``host_compatible`` option is a Boolean option that determines whether or not the
+microarchitectures considered during the solve are constrained to be compatible with the
+host Spack is currently running on. For instance, if this option is set to ``true``, a
+user cannot concretize for ``target=icelake`` while running on a Haswell node.
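Combining the two ``targets`` options documented above into one snippet (values illustrative):

```yaml
# Hedged sketch: solve over generic targets only, restricted to targets
# the current host can actually run.
concretizer:
  targets:
    granularity: generic
    host_compatible: true
```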
+---------------
+Duplicate nodes
+---------------
+
+The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
+the same package. This is mainly relevant for build dependencies, which may have their version
+pinned by some nodes, and thus be required at different versions by different nodes in the same
+DAG.
+
+The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
+then a single configuration per package is allowed in the DAG. This means, for instance, that only
+a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
+faster concretization, at the expense of making a few specs unsolvable.
+
+If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates.
+This allows you, for instance, to concretize specs whose nodes require different, and incompatible, ranges
+of some build tool. For instance, in the figure below the latest `py-shapely` requires a newer `py-setuptools`,
+while `py-numpy` still needs an older version:
+
+.. figure:: images/shapely_duplicates.svg
+   :scale: 70 %
+   :align: center
+
+Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
+default behavior is ``duplicates:strategy:minimal``.

 .. _build-settings:

 ================================
@@ -232,76 +329,6 @@ Specific limitations include:
   then Spack will not add a new external entry (``spack config blame packages``
   can help locate all external entries).

-.. _concretizer-options:
-
-----------------------
-Concretizer options
-----------------------
-
-``packages.yaml`` gives the concretizer preferences for specific packages,
-but you can also use ``concretizer.yaml`` to customize aspects of the
-algorithm it uses to select the dependencies you install:
-
-.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
-   :language: yaml
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Reuse already installed packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
-whether it will do a "fresh" installation and prefer the latest settings from
-``package.py`` files and ``packages.yaml`` (``false``).
-You can use:
-
-.. code-block:: console
-
-   % spack install --reuse <spec>
-
-to enable reuse for a single installation, and you can use:
-
-.. code-block:: console
-
-   spack install --fresh <spec>
-
-to do a fresh install if ``reuse`` is enabled by default.
-``reuse: true`` is the default.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Selection of the target microarchitectures
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The options under the ``targets`` attribute control which targets are considered during a solve.
-Currently the options in this section are only configurable from the ``concretizer.yaml`` file
-and there are no corresponding command line arguments to enable them for a single solve.
-
-The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
-If set to:
-
-.. code-block:: yaml
-
-   concretizer:
-     targets:
-       granularity: microarchitectures
-
-Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
-compatibility. If instead the option is set to:
-
-.. code-block:: yaml
-
-   concretizer:
-     targets:
-       granularity: generic
-
-Spack will consider only generic microarchitectures. For instance, when running on an
-Haswell node, Spack will consider ``haswell`` as the best target in the former case and
-``x86_64_v3`` as the best target in the latter case.
-
-The ``host_compatible`` option is a Boolean option that determines whether or not the
-microarchitectures considered during the solve are constrained to be compatible with the
-host Spack is currently running on. For instance, if this option is set to ``true``, a
-user cannot concretize for ``target=icelake`` while running on an Haswell node.
-
 .. _package-requirements:

 --------------------
lib/spack/docs/build_systems/inteloneapipackage.rst

@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. See the :ref:`package-list` for the full list of available
-oneAPI packages or use::
+packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+list of available oneAPI packages, or use::

    spack list -d oneAPI
lib/spack/docs/conf.py

@@ -48,9 +48,6 @@
 os.environ["COLIFY_SIZE"] = "25x120"
 os.environ["COLUMNS"] = "120"

-# Generate full package list if needed
-subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
-
 # Generate a command index if an update is needed
 subprocess.call(
     [
@@ -214,6 +211,7 @@ def setup(sphinx):
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
+    ("py:class", "spack.spec.InstallStatus"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
 ]
lib/spack/docs/containers.rst

@@ -212,18 +212,12 @@ under the ``container`` attribute of environments:
     final:
       - libgomp

-    # Extra instructions
-    extra_instructions:
-      final: |
-        RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc
-
     # Labels for the image
     labels:
       app: "gromacs"
       mpi: "mpich"

-A detailed description of the options available can be found in the
-:ref:`container_config_options` section.
+A detailed description of the options available can be found in the :ref:`container_config_options` section.

 -------------------
 Setting Base Images
@@ -525,6 +519,13 @@ the example below:
    COPY data /share/myapp/data
    {% endblock %}

+The Dockerfile is generated by running:
+
+.. code-block:: console
+
+   $ spack -e /opt/environment containerize
+
+Note that the environment must be active for spack to read the template.
 The recipe that gets generated contains the two extra instructions that we added in our template extension:

 .. code-block:: Dockerfile
lib/spack/docs/contribution_guide.rst

@@ -310,53 +310,11 @@ Once all of the dependencies are installed, you can try building the documentation:

    $ make clean
    $ make

-If you see any warning or error messages, you will have to correct those before
-your PR is accepted.
-
-If you are editing the documentation, you should obviously be running the
-documentation tests. But even if you are simply adding a new package, your
-changes could cause the documentation tests to fail:
-
-.. code-block:: console
-
-   package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
-
-At first, this error message will mean nothing to you, since you didn't edit
-that file. Until you look at line 8745 of the file in question:
-
-.. code-block:: rst
-
-   Description:
-      NetCDF is a set of software libraries and self-describing, machine-
-      independent data formats that support the creation, access, and sharing
-      of array-oriented scientific data.
-
-Our documentation includes :ref:`a list of all Spack packages <package-list>`.
-If you add a new package, its docstring is added to this page. The problem in
-this case was that the docstring looked like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """
-       NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data.
-       """
-
-Docstrings cannot start with a newline character, or else Sphinx will complain.
-Instead, they should look like:
-
-.. code-block:: python
-
-   class Netcdf(Package):
-       """NetCDF is a set of software libraries and self-describing,
-       machine-independent data formats that support the creation,
-       access, and sharing of array-oriented scientific data."""
-
-Documentation changes can result in much more obfuscated warning messages.
-If you don't understand what they mean, feel free to ask when you submit
-your PR.
+If you see any warning or error messages, you will have to correct those before your PR
+is accepted. If you are editing the documentation, you should be running the
+documentation tests to make sure there are no errors. Documentation changes can result
+in some obfuscated warning messages. If you don't understand what they mean, feel free
+to ask when you submit your PR.

 --------
 Coverage
lib/spack/docs/images/shapely_duplicates.svg (new file, 2784 lines)

File diff suppressed because it is too large. (New image, 108 KiB.)
lib/spack/docs/index.rst

@@ -54,9 +54,16 @@ or refer to the full manual below.
    features
    getting_started
    basic_usage
-   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
    replace_conda_homebrew

+.. toctree::
+   :maxdepth: 2
+   :caption: Links
+
+   Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
+   Packages (packages.spack.io) <https://packages.spack.io>
+   Binaries (binaries.spack.io) <https://cache.spack.io>
+
 .. toctree::
    :maxdepth: 2
    :caption: Reference
@@ -72,7 +79,6 @@ or refer to the full manual below.
    repositories
    binary_caches
    command_index
-   package_list
    chain
    extensions
    pipelines
@@ -1,17 +0,0 @@
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _package-list:
|
||||
|
||||
============
|
||||
Package List
|
||||
============
|
||||
|
||||
This is a list of things you can install using Spack. It is
|
||||
automatically generated based on the packages in this Spack
|
||||
version.
|
||||
|
||||
.. raw:: html
|
||||
:file: package_list.html
|
||||
@@ -3635,7 +3635,8 @@ regardless of the build system. The arguments for the phase are:
|
||||
The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
|
||||
correspond to ``self.spec`` and ``self.spec.prefix`` respectively.
|
||||
|
||||
If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:
|
||||
If the ``package.py`` has build instructions in a separate
|
||||
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@@ -3645,56 +3646,6 @@ If the ``package.py`` encodes builders explicitly, the signature for a phase cha
|
||||
|
||||
In this case the package is passed as the second argument, and ``self`` is the builder instance.
|
||||
|
||||
.. _multiple_build_systems:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
Multiple build systems
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
There are cases where a software actively supports two build systems, or changes build systems
|
||||
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
|
||||
these cases natively, if a recipe is written using builders explicitly.
|
||||
|
||||
For instance, software that supports two build systems unconditionally should derive from
|
||||
both ``*Package`` base classes, and declare the possible use of multiple build systems using
|
||||
a directive:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class ArpackNg(CMakePackage, AutotoolsPackage):
|
||||
|
||||
build_system("cmake", "autotools", default="cmake")
|
||||
|
||||
In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
|
||||
supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
|
||||
will likely contain some overriding of default builder methods:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||
def cmake_args(self):
|
||||
pass
|
||||
|
||||
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||
def configure_args(self):
|
||||
pass
|
||||
|
||||
In more complex cases it might happen that the build system changes according to certain conditions,
|
||||
for instance across versions. That can be expressed with conditional variant values:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class ArpackNg(CMakePackage, AutotoolsPackage):
|
||||
|
||||
build_system(
|
||||
conditional("cmake", when="@0.64:"),
|
||||
conditional("autotools", when="@:0.63"),
|
||||
default="cmake",
|
||||
)
|
||||
|
||||
In the example the directive impose a change from ``Autotools`` to ``CMake`` going
|
||||
from ``v0.63`` to ``v0.64``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
Mixin base classes
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
@@ -3741,6 +3692,106 @@ for instance:
|
||||
|
||||
In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.

.. _multiple_build_systems:

----------------------
Multiple build systems
----------------------

There are cases where a package actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
these cases by splitting the build instructions into separate builder classes.

For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       variant("my_feature", default=True)

       build_system("cmake", "autotools", default="cmake")

In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default.

Additional build instructions are split into separate builder classes:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           return [
               self.define_from_variant("MY_FEATURE", "my_feature")
           ]


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           return self.with_or_without("my-feature", variant="my_feature")

In this example, ``spack install example +my_feature build_system=cmake`` will
pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``.

Similarly, ``spack install example +my_feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.
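
To make the two paths concrete, here is a minimal sketch of the corresponding
commands (the ``example`` package name comes from the snippet above):

.. code-block:: console

   $ spack install example +my_feature build_system=cmake      # picks CMakeBuilder
   $ spack install example +my_feature build_system=autotools  # picks AutotoolsBuilder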

Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use ``when`` conditions as
usual:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system("cmake", "autotools", default="cmake")

       # Runtime dependencies
       depends_on("ncurses")
       depends_on("libxml2")

       # Lower bounds for cmake only apply when using cmake as the build system
       with when("build_system=cmake"):
           depends_on("cmake@3.18:", when="@2.0:", type="build")
           depends_on("cmake@3:", type="build")

       # Specify extra build dependencies used only in the configure script
       with when("build_system=autotools"):
           depends_on("perl", type="build")
           depends_on("pkgconfig", type="build")

Very often projects switch from one build system to another, or add support
for a new build system from a certain version, which means that the choice
of the build system typically depends on a version range. Those situations can
be handled by using conditional values in the ``build_system`` directive:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
from ``v0.63`` to ``v0.64``.

The ``build_system`` can be used as an ordinary variant, which also means that it can
be used in ``depends_on`` statements. This can be useful when a package *requires* that
its dependency has a CMake config file, meaning that the dependent can only build when the
dependency is built with CMake, and not Autotools. In that case, you can force the choice
of the build system in the dependent:

.. code-block:: python

   class Dependent(CMakePackage):

       depends_on("example build_system=cmake")

.. _install-environment:

-----------------------
@@ -6196,7 +6247,100 @@ follows:
        "foo-package@{0}".format(version_str)
    )

.. _package-lifecycle:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add detection tests to packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To ensure that software is detected correctly for multiple configurations
and on different systems, users can write a ``detection_test.yaml`` file and
put it in the package directory alongside the ``package.py`` file.
This YAML file contains enough information for Spack to mock an environment
and try to check if the detection logic yields the results that are expected.

As a general rule, attributes at the top level of ``detection_test.yaml``
represent search mechanisms, and each maps to a list of tests that should confirm
the validity of the package's detection logic.

The detection tests can be run with the following command:

.. code-block:: console

   $ spack audit externals

Errors that have been detected are reported to the screen.

""""""""""""""""""""""""""
Tests for PATH inspections
""""""""""""""""""""""""""

Detection tests relying on ``PATH`` inspections are listed under
the ``paths`` attribute:

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-3.9"
       - "bin/clang++-3.9"
       script: |
         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
         echo "Target: x86_64-pc-linux-gnu"
         echo "Thread model: posix"
         echo "InstalledDir: /usr/bin"
     results:
     - spec: 'llvm@3.9.1 +clang~lld~lldb'

Each test is performed by first creating a temporary directory structure as
specified in the corresponding ``layout`` and by then running
package detection and checking that the outcome matches the expected
``results``. The exact details on how to specify both the ``layout`` and the
``results`` are reported in the table below:

.. list-table:: Test based on PATH inspections
   :header-rows: 1

   * - Option Name
     - Description
     - Allowed Values
     - Required Field
   * - ``layout``
     - Specifies the filesystem tree used for the test
     - List of objects
     - Yes
   * - ``layout:[0]:executables``
     - Relative paths for the mock executables to be created
     - List of strings
     - Yes
   * - ``layout:[0]:script``
     - Mock logic for the executable
     - Any valid shell script
     - Yes
   * - ``results``
     - List of expected results
     - List of objects (empty if no result is expected)
     - Yes
   * - ``results:[0]:spec``
     - A spec that is expected from detection
     - Any valid spec
     - Yes
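
As the table notes, ``results`` may be an empty list, which turns a test into a
negative check. A hedged sketch (the executable name and output are invented)
asserting that a clang-like script with bogus output must not be detected:

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-3.9"
       script: |
         echo "this is not a real compiler"
     results: []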

"""""""""""""""""""""""""""""""
Reuse tests from other packages
"""""""""""""""""""""""""""""""

When using a custom repository, it is possible to customize a package that already exists in ``builtin``
and reuse its external tests. To do so, just write a ``detection_test.yaml`` alongside the customized
``package.py`` with an ``includes`` attribute. For instance the ``detection_test.yaml`` for
``myrepo.llvm`` might look like:

.. code-block:: yaml

   includes:
   - "builtin.llvm"

This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
those locally defined in the file.
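
Since included tests run in addition to local ones, the two can coexist in the
same file. A minimal sketch (the local layout content is illustrative):

.. code-block:: yaml

   includes:
   - "builtin.llvm"
   paths:
   - layout:
     - executables:
       - "bin/clang-16"
       script: |
         echo "clang version 16.0.0"
     results:
     - spec: "llvm@16.0.0 +clang"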

-----------------------------
Style guidelines for packages
@@ -6655,3 +6799,30 @@ To achieve backward compatibility with the single-class format Spack creates in
Overall the role of the adapter is to route access to attributes or methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
the adapter's role is to "emulate" a method resolution order like the one represented by the red arrows.

------------------------------
Specifying License Information
------------------------------

A significant portion of software that Spack packages is open source. Most open
source software is released under one or more common open source licenses.
Specifying the specific license that a package is released under in a project's
``package.py`` is good practice. To specify a license, find the SPDX identifier for
a project and then add it using the license directive:

.. code-block:: python

   license("<SPDX Identifier HERE>")

Note that specifying a license without a ``when`` clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
To account for this, you can specify when licenses should be applied. For
example, to specify that a specific license identifier should only apply
to versions up to and including 1.5, you could write the following directive:

.. code-block:: python

   license("...", when="@:1.5")

@@ -213,6 +213,16 @@ pipeline jobs.
``spack ci generate``
^^^^^^^^^^^^^^^^^^^^^

Throughout this documentation, references to the "mirror" mean the target
mirror which is checked for the presence of up-to-date specs, and where
any scheduled jobs should push built binary packages. In the past, this
defaulted to the mirror at index 0 in the mirror configs, and could be
overridden using the ``--buildcache-destination`` argument. Starting with
Spack 0.23, ``spack ci generate`` will require you to identify this mirror
by the name "buildcache-destination". While you can configure any number
of mirrors as sources for your pipelines, you will need to identify the
destination mirror by name.
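
As a sketch, the corresponding ``mirrors`` configuration might name the
destination explicitly (the URLs here are placeholders):

.. code-block:: yaml

   mirrors:
     buildcache-destination: https://mirror.example.com/build-cache
     # any number of additional mirrors may act as sources
     upstream-binaries: https://binaries.example.com/cache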

Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
During concretization of the environment, ``spack ci generate`` also writes a

@@ -4,7 +4,7 @@
SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
Using Spack to Replace Homebrew/Conda
Spack for Homebrew/Conda Users
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers

@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

@@ -212,9 +212,9 @@ package versions, simply run the following commands:
   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
Spack's garbage collection system that these packages should be cleaned up.

Don't worry, however: this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple Spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent Spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

--------------

@@ -1,13 +1,13 @@
sphinx==7.2.5
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
python-levenshtein==0.23.0
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.4
urllib3==2.0.7
pytest==7.4.2
isort==5.12.0
black==23.7.0
black==23.9.1
flake8==6.1.0
mypy==1.5.1
mypy==1.6.1

@@ -1,9 +1,7 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.11, , Interpreter for Spack
Python, 3.6--3.12, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives

lib/spack/external/__init__.py (vendored, 2 changes)
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)

astunparse
----------------

lib/spack/external/archspec/__init__.py (vendored, 2 changes)
@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.0"
__version__ = "0.2.1"
@@ -79,14 +79,18 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
        self.features = features
        self.compilers = compilers
        self.generation = generation
        # Cache the ancestor computation
        self._ancestors = None

    @property
    def ancestors(self):
        """All the ancestors of this microarchitecture."""
        value = self.parents[:]
        for parent in self.parents:
            value.extend(a for a in parent.ancestors if a not in value)
        return value
        if self._ancestors is None:
            value = self.parents[:]
            for parent in self.parents:
                value.extend(a for a in parent.ancestors if a not in value)
            self._ancestors = value
        return self._ancestors

    def _to_set(self):
        """Returns a set of the nodes in this microarchitecture DAG."""
@@ -145,6 +145,13 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "corei7",
          "flags": "-march={name} -mtune=generic -mpopcnt"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -217,6 +224,13 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name} -fma -mf16c"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -300,6 +314,13 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "skylake-avx512",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -1412,6 +1433,92 @@
      ]
    }
  },
  "sapphirerapids": {
    "from": [
      "icelake"
    ],
    "vendor": "GenuineIntel",
    "features": [
      "mmx",
      "sse",
      "sse2",
      "ssse3",
      "sse4_1",
      "sse4_2",
      "popcnt",
      "aes",
      "pclmulqdq",
      "avx",
      "rdrand",
      "f16c",
      "movbe",
      "fma",
      "avx2",
      "bmi1",
      "bmi2",
      "rdseed",
      "adx",
      "clflushopt",
      "xsavec",
      "xsaveopt",
      "avx512f",
      "avx512vl",
      "avx512bw",
      "avx512dq",
      "avx512cd",
      "avx512vbmi",
      "avx512ifma",
      "sha_ni",
      "clwb",
      "rdpid",
      "gfni",
      "avx512_vbmi2",
      "avx512_vpopcntdq",
      "avx512_bitalg",
      "avx512_vnni",
      "vpclmulqdq",
      "vaes",
      "avx512_bf16",
      "cldemote",
      "movdir64b",
      "movdiri",
      "pdcm",
      "serialize",
      "waitpkg"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "11.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "12.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "intel": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "dpcpp": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
  "k10": {
    "from": ["x86_64"],
    "vendor": "AuthenticAMD",
@@ -2065,8 +2172,6 @@
      "pku",
      "gfni",
      "flush_l1d",
      "erms",
      "avic",
      "avx512f",
      "avx512dq",
      "avx512ifma",
@@ -2083,12 +2188,12 @@
    "compilers": {
      "gcc": [
        {
          "versions": "10.3:13.0",
          "versions": "10.3:12.2",
          "name": "znver3",
          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
        },
        {
          "versions": "13.1:",
          "versions": "12.3:",
          "name": "znver4",
          "flags": "-march={name} -mtune={name}"
        }
lib/spack/llnl/path.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Path primitives that just require Python standard library."""
import functools
import sys
from typing import List, Optional
from urllib.parse import urlparse


class Path:
    """Enum to identify the path-style."""

    unix: int = 0
    windows: int = 1
    platform_path: int = windows if sys.platform == "win32" else unix


def format_os_path(path: str, mode: int = Path.unix) -> str:
    """Formats the input path to use consistent, platform specific separators.

    Absolute paths are converted between drive letters and a prepended '/' as per platform
    requirement.

    Parameters:
        path: the path to be normalized, must be a string or expose the replace method.
        mode: the path file separator style to normalize the passed path to.
            Default is unix style, i.e. '/'
    """
    if not path:
        return path
    if mode == Path.windows:
        path = path.replace("/", "\\")
    else:
        path = path.replace("\\", "/")
    return path


def convert_to_posix_path(path: str) -> str:
    """Converts the input path to POSIX style."""
    return format_os_path(path, mode=Path.unix)


def convert_to_windows_path(path: str) -> str:
    """Converts the input path to Windows style."""
    return format_os_path(path, mode=Path.windows)


def convert_to_platform_path(path: str) -> str:
    """Converts the input path to the current platform's native style."""
    return format_os_path(path, mode=Path.platform_path)


def path_to_os_path(*parameters: str) -> List[str]:
    """Takes an arbitrary number of positional parameters, converts each argument of type
    string to use a normalized filepath separator, and returns a list of all values.
    """

    def _is_url(path_or_url: str) -> bool:
        if "\\" in path_or_url:
            return False
        url_tuple = urlparse(path_or_url)
        return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1

    result = []
    for item in parameters:
        if isinstance(item, str) and not _is_url(item):
            item = convert_to_platform_path(item)
        result.append(item)
    return result


def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
    """Filters function arguments to account for platform path separators.
    Optional slicing range can be specified to select specific arguments

    This decorator takes all (or a slice) of a method's positional arguments
    and normalizes usage of filepath separators on a per platform basis.

    Note: `**kwargs`, urls, and any type that is not a string are ignored
    so in such cases where path normalization is required, that should be
    handled by calling path_to_os_path directly as needed.

    Parameters:
        arg_slice: a slice object specifying the slice of arguments
            in the decorated method over which filepath separators are
            normalized
    """

    def holder_func(func):
        @functools.wraps(func)
        def path_filter_caller(*args, **kwargs):
            args = list(args)
            if arg_slice:
                args[arg_slice] = path_to_os_path(*args[arg_slice])
            else:
                args = path_to_os_path(*args)
            return func(*args, **kwargs)

        return path_filter_caller

    if _func:
        return holder_func(_func)
    return holder_func
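
As an illustrative usage sketch of the new module (not part of the diff;
``read_manifest`` is a made-up function):

.. code-block:: python

   from llnl.path import convert_to_posix_path, system_path_filter

   @system_path_filter
   def read_manifest(manifest_path: str) -> str:
       # manifest_path arrives with separators normalized for the platform
       with open(manifest_path) as f:
           return f.read()

   # the plain helpers work on single strings too
   assert convert_to_posix_path("a\\b\\c") == "a/b/c"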

lib/spack/llnl/string.py (new file, 67 lines)
@@ -0,0 +1,67 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""String manipulation functions that do not have other dependencies than Python
standard library
"""
from typing import List, Optional


def comma_list(sequence: List[str], article: str = "") -> str:
    if type(sequence) is not list:
        sequence = list(sequence)

    if not sequence:
        return ""
    if len(sequence) == 1:
        return sequence[0]

    out = ", ".join(str(s) for s in sequence[:-1])
    if len(sequence) != 2:
        out += ","  # oxford comma
    out += " "
    if article:
        out += article + " "
    out += str(sequence[-1])
    return out


def comma_or(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'or').
    """
    return comma_list(sequence, "or")


def comma_and(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'and').
    """
    return comma_list(sequence, "and")


def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]


def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
    """Pluralize <singular> word by adding an s if n != 1.

    Arguments:
        n: number of things there are
        singular: singular form of word
        plural: optional plural form, for when it's not just singular + 's'
        show_n: whether to include n in the result string (default True)

    Returns:
        "1 thing" if n == 1 or "n things" if n != 1
    """
    number = f"{n} " if show_n else ""
    if n == 1:
        return f"{number}{singular}"
    elif plural is not None:
        return f"{number}{plural}"
    else:
        return f"{number}{singular}s"

lib/spack/llnl/url.py (new file, 459 lines)
@@ -0,0 +1,459 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit

# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")

# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
    tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
    + PREFIX_EXTENSIONS
    + EXTENSIONS
    + NO_TAR_EXTENSIONS
)
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}


def find_list_urls(url: str) -> Set[str]:
    r"""Find good list URLs for the supplied URL.

    By default, returns the dirname of the archive path.

    Provides special treatment for the following websites, which have a
    unique list URL different from the dirname of the download URL:

    =========  =======================================================
    GitHub     https://github.com/<repo>/<name>/releases
    GitLab     https://gitlab.\*/<repo>/<name>/tags
    BitBucket  https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
    CRAN       https://\*.r-project.org/src/contrib/Archive/<name>
    PyPI       https://pypi.org/simple/<name>/
    LuaRocks   https://luarocks.org/modules/<repo>/<name>
    =========  =======================================================

    Note: this function is called by `spack versions`, `spack checksum`,
    and `spack create`, but not by `spack fetch` or `spack install`.

    Parameters:
        url (str): The download URL for the package

    Returns:
        set: One or more list URLs for the package
    """

    url_types = [
        # GitHub
        # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
        (r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
        # GitLab API endpoint
        # e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
        (
            r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
            lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
        ),
        # GitLab non-API endpoint
        # e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
        (r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
        # BitBucket
        # e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
        (r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
        # CRAN
        # e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
        # e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
        (
            r"(.*\.r-project\.org/src/contrib)/([^_]+)",
            lambda m: m.group(1) + "/Archive/" + m.group(2),
        ),
        # PyPI
        # e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
        (
            r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
            lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
        ),
        # LuaRocks
        # e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
        # e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
        (
            r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
            + r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
            lambda m: "https://luarocks.org/modules/"
            + m.group("org")
            + "/"
            + m.group("name")
            + "/",
        ),
    ]

    list_urls = {os.path.dirname(url)}

    for pattern, fun in url_types:
        match = re.search(pattern, url)
        if match:
            list_urls.add(fun(match))

    return list_urls


def strip_query_and_fragment(url: str) -> Tuple[str, str]:
    """Strips query and fragment from a url, then returns the base url and the suffix.

    Args:
        url: URL to be stripped

    Raises:
        ValueError: when there is any error parsing the URL
    """
    components = urlsplit(url)
    stripped = components[:3] + (None, None)

    query, frag = components[3:5]
    suffix = ""
    if query:
        suffix += "?" + query
    if frag:
        suffix += "#" + frag

    return urlunsplit(stripped), suffix


SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")


def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
    """If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
    returns the input URL and an empty string.
    """
    match = SOURCEFORGE_RE.search(url)
    if match is not None:
        return match.groups()
    return url, ""


def has_extension(path_or_url: str, ext: str) -> bool:
    """Returns true if the extension in input is present in path, false otherwise."""
    prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
    if not ext.startswith(r"\."):
        ext = rf"\.{ext}$"

    if re.search(ext, prefix):
        return True
    return False


def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
    """Tries to match an allowed archive extension to the input. Returns the first match,
    or None if no match was found.

    Raises:
        ValueError: if the input is None
    """
    if path_or_url is None:
        raise ValueError("Can't call extension() on None")

    for t in ALLOWED_ARCHIVE_TYPES:
        if has_extension(path_or_url, t):
            return t
    return None


def remove_extension(path_or_url: str, *, extension: str) -> str:
    """Returns the input with the extension removed"""
    suffix = rf"\.{extension}$"
    return re.sub(suffix, "", path_or_url)


def check_and_remove_ext(path: str, *, extension: str) -> str:
    """Returns the input path with the extension removed, if the extension is present in path.
    Otherwise, returns the input unchanged.
    """
    if not has_extension(path, extension):
        return path
    path, _ = split_url_on_sourceforge_suffix(path)
    return remove_extension(path, extension=extension)


def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
    """If a path contains the extension in input, returns the path stripped of the extension.
    Otherwise, returns the input path.

    If extension is None, attempts to strip any allowed extension from path.
    """
    if extension is None:
        for t in ALLOWED_ARCHIVE_TYPES:
            if has_extension(path_or_url, ext=t):
                extension = t
                break
        else:
            return path_or_url

    return check_and_remove_ext(path_or_url, extension=extension)


def split_url_extension(url: str) -> Tuple[str, ...]:
    """Some URLs have a query string, e.g.:

    1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
    2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
    3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0

    In (1), the query string needs to be stripped to get at the
    extension, but in (2) & (3), the filename is IN a single final query
    argument.

    This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
    The suffix contains anything that was stripped off the URL to
    get at the file extension. In (1), it will be ``'?raw=true'``, but
    in (2), it will be empty. In (3) the suffix is a parameter that follows
    after the file extension, e.g.:

    1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
    2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
    3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
    """
    # Strip off sourceforge download suffix.
    # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
    prefix, suffix = split_url_on_sourceforge_suffix(url)

    ext = extension_from_path(prefix)
    if ext is not None:
        prefix = strip_extension(prefix)
        return prefix, ext, suffix

    try:
        prefix, suf = strip_query_and_fragment(prefix)
    except ValueError:
        # FIXME: tty.debug("Got error parsing path %s" % path)
        # Ignore URL parse errors here
        return url, ""

    ext = extension_from_path(prefix)
    prefix = strip_extension(prefix)
    suffix = suf + suffix
    if ext is None:
        ext = ""

    return prefix, ext, suffix


def strip_version_suffixes(path_or_url: str) -> str:
    """Some tarballs contain extraneous information after the version:

    * ``bowtie2-2.2.5-source``
    * ``libevent-2.0.21-stable``
    * ``cuda_8.0.44_linux.run``

    These strings are not part of the version number and should be ignored.
    This function strips those suffixes off and returns the remaining string.
    The goal is that the version is always the last thing in ``path``:

    * ``bowtie2-2.2.5``
    * ``libevent-2.0.21``
    * ``cuda_8.0.44``

    Args:
        path_or_url: The filename or URL for the package

    Returns:
        The ``path`` with any extraneous suffixes removed
    """
    # NOTE: This could be done with complicated regexes in parse_version_offset
    # NOTE: The problem is that we would have to add these regexes to the end
    # NOTE: of every single version regex. Easier to just strip them off
    # NOTE: permanently

    suffix_regexes = [
        # Download type
        r"[Ii]nstall",
        r"all",
        r"code",
        r"[Ss]ources?",
        r"file",
        r"full",
        r"single",
        r"with[a-zA-Z_-]+",
        r"rock",
        r"src(_0)?",
        r"public",
        r"bin",
        r"binary",
        r"run",
        r"[Uu]niversal",
        r"jar",
        r"complete",
        r"dynamic",
        r"oss",
        r"gem",
        r"tar",
        r"sh",
        # Download version
        r"release",
        r"bin",
        r"stable",
        r"[Ff]inal",
        r"rel",
        r"orig",
        r"dist",
        r"\+",
        # License
        r"gpl",
        # Arch
        # Needs to come before and after OS, appears in both orders
        r"ia32",
        r"intel",
        r"amd64",
        r"linux64",
        r"x64",
        r"64bit",
        r"x86[_-]64",
        r"i586_64",
        r"x86",
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)",
        # Other
        r"cpp",
        r"gtk",
        r"incubating",
        # OS
        r"[Ll]inux(_64)?",
        r"LINUX",
        r"[Uu]ni?x",
        r"[Ss]un[Oo][Ss]",
        r"[Mm]ac[Oo][Ss][Xx]?",
        r"[Oo][Ss][Xx]",
        r"[Dd]arwin(64)?",
        r"[Aa]pple",
        r"[Ww]indows",
        r"[Ww]in(64|32)?",
        r"[Cc]ygwin(64|32)?",
        r"[Mm]ingw",
        r"centos",
        # Arch
        # Needs to come before and after OS, appears in both orders
        r"ia32",
        r"intel",
        r"amd64",
        r"linux64",
        r"x64",
        r"64bit",
        r"x86[_-]64",
        r"i586_64",
        r"x86",
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)?",
        # PyPI
        r"[._-]py[23].*\.whl",
        r"[._-]cp[23].*\.whl",
        r"[._-]win.*\.exe",
    ]

    for regex in suffix_regexes:
        # Remove the suffix from the end of the path
        # This may be done multiple times
        path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)

    return path_or_url


def expand_contracted_extension(extension: str) -> str:
    """Returns the expanded version of a known contracted extension.

    This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
    return the input unmodified.
    """
    extension = extension.strip(".")
    return CONTRACTION_MAP.get(extension, extension)


def expand_contracted_extension_in_path(
    path_or_url: str, *, extension: Optional[str] = None
) -> str:
    """Returns the input path or URL with any contraction extension expanded.

    Args:
        path_or_url: path or URL to be expanded
        extension: if specified, only attempt to expand that extension
    """
    extension = extension or extension_from_path(path_or_url)
    if extension is None:
        return path_or_url

    expanded = expand_contracted_extension(extension)
    if expanded != extension:
        return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
    return path_or_url


def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
    """Returns compression extension for a compressed archive"""
    extension = expand_contracted_extension(extension)
    for ext in [*EXTENSIONS]:
        if ext in extension:
            return ext
    return None


def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
    """Strips the compression extension from the input, and returns it. For instance,
    "foo.tgz" becomes "foo.tar".

    If no extension is given, try a default list of extensions.

    Args:
        path_or_url: input to be stripped
        ext: if given, extension to be stripped
    """
    if not extension_from_path(path_or_url):
        return path_or_url

    expanded_path = expand_contracted_extension_in_path(path_or_url)
    candidates = [ext] if ext is not None else EXTENSIONS
    for current_extension in candidates:
        modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
        if modified_path != expanded_path:
            return modified_path
    return expanded_path


def allowed_archive(path_or_url: str) -> bool:
    """Returns true if the input is a valid archive, False otherwise."""
    return (
        False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
    )


def determine_url_file_extension(path: str) -> str:
    """This returns the type of archive a URL refers to. This is
    sometimes confusing because of URLs like:

    (1) https://github.com/petdance/ack/tarball/1.93_02

    Where the URL doesn't actually contain the filename. We need
    to know what type it is so that we can appropriately name files
    in mirrors.
    """
    match = re.search(r"github.com/.+/(zip|tar)ball/", path)
    if match:
        if match.group(1) == "zip":
            return "zip"
        elif match.group(1) == "tar":
            return "tar.gz"

    prefix, ext, suffix = split_url_extension(path)
    return ext
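
And a usage sketch for the URL helpers (illustrative, not part of the diff;
the exact tuple shapes are documented in the docstrings above):

.. code-block:: python

   from llnl.url import split_url_extension, strip_extension

   prefix, ext, suffix = split_url_extension(
       "https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0"
   )
   # prefix: URL with the extension and query removed
   # ext:    the matched archive extension
   # suffix: what was stripped to reach the extension (here the ?ref=... part)

   strip_extension("foo-1.2.3.tar.gz")  # "foo-1.2.3"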

@@ -11,6 +11,7 @@
import itertools
import numbers
import os
import pathlib
import posixpath
import re
import shutil
@@ -27,7 +28,8 @@
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
from ..path import path_to_os_path, system_path_filter

if sys.platform != "win32":
    import grp
@@ -154,6 +156,37 @@ def lookup(name):
    shutil.copystat = copystat


def polite_path(components: Iterable[str]):
    """
    Given a list of strings which are intended to be path components,
    generate a path, and format each component to avoid generating extra
    path entries.

    For example all "/", "\", and ":" characters will be replaced with
    "_". Other characters like "=" will also be replaced.
    """
    return os.path.join(*[polite_filename(x) for x in components])


@memoized
def _polite_antipattern():
    # A regex of all the characters we don't want in a filename
    return re.compile(r"[^A-Za-z0-9_.-]")


def polite_filename(filename: str) -> str:
    """
    Replace generally problematic filename characters with underscores.

    This differs from sanitize_filename in that it is more aggressive in
    changing characters in the name. For example it removes "=" which can
    confuse path parsing in external tools.
    """
    # This character set applies for both Windows and Linux. It does not
    # account for reserved filenames in Windows.
    return _polite_antipattern().sub("_", filename)


def getuid():
    if sys.platform == "win32":
        import ctypes
@@ -335,8 +368,7 @@ def groupid_to_group(x):

    if string:
        regex = re.escape(regex)
    filenames = path_to_os_path(*filenames)
    for filename in filenames:
    for filename in path_to_os_path(*filenames):
        msg = 'FILTER FILE: {0} [replacing "{1}"]'
        tty.debug(msg.format(filename, regex))

@@ -2426,7 +2458,7 @@ def library_dependents(self):
        """
        Set of directories where package binaries/libraries are located.
        """
        return set([self.pkg.prefix.bin]) | self._additional_library_dependents
        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents

    def add_library_dependent(self, *dest):
        """
@@ -2439,9 +2471,9 @@ def add_library_dependent(self, *dest):
        """
        for pth in dest:
            if os.path.isfile(pth):
                self._additional_library_dependents.add(os.path.dirname)
                self._additional_library_dependents.add(pathlib.Path(pth).parent)
            else:
                self._additional_library_dependents.add(pth)
                self._additional_library_dependents.add(pathlib.Path(pth))

    @property
    def rpaths(self):
@@ -2454,7 +2486,7 @@ def rpaths(self):
            dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
        for extra_path in self._addl_rpaths:
            dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
        return set(dependent_libs)
        return set([pathlib.Path(x) for x in dependent_libs])

    def add_rpath(self, *paths):
        """
@@ -2470,7 +2502,7 @@ def add_rpath(self, *paths):
        """
        self._addl_rpaths = self._addl_rpaths | set(paths)

    def _link(self, path, dest_dir):
    def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
        """Perform link step of simulated rpathing, installing
        simlinks of file in path to the dest_dir
        location. This method deliberately prevents
@@ -2478,27 +2510,35 @@ def _link(self, path, dest_dir):
        This is because it is both meaningless from an rpath
        perspective, and will cause an error when Developer
        mode is not enabled"""
        file_name = os.path.basename(path)
        dest_file = os.path.join(dest_dir, file_name)
        if os.path.exists(dest_dir) and not dest_file == path:

        def report_already_linked():
            # We have either already symlinked or we are encountering a naming clash
            # either way, we don't want to overwrite existing libraries
            already_linked = islink(str(dest_file))
            tty.debug(
                "Linking library %s to %s failed, " % (str(path), str(dest_file))
                + "already linked."
                if already_linked
                else "library with name %s already exists at location %s."
                % (str(file_name), str(dest_dir))
            )

        file_name = path.name
        dest_file = dest_dir / file_name
        if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
            try:
                symlink(path, dest_file)
                symlink(str(path), str(dest_file))
            # For py2 compatibility, we have to catch the specific Windows error code
            # associate with trying to create a file that already exists (winerror 183)
            # Catch OSErrors missed by the SymlinkError checks
            except OSError as e:
                if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
                    # We have either already symlinked or we are encountering a naming clash
                    # either way, we don't want to overwrite existing libraries
                    already_linked = islink(dest_file)
                    tty.debug(
                        "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
                        if already_linked
                        else "library with name %s already exists at location %s."
                        % (file_name, dest_dir)
                    )
                    pass
                    report_already_linked()
                else:
                    raise e
            # catch errors we raise ourselves from Spack
            except llnl.util.symlink.AlreadyExistsError:
                report_already_linked()

    def establish_link(self):
        """
@@ -14,7 +14,7 @@

from llnl.util import lang, tty

import spack.util.string
from ..string import plural

if sys.platform != "win32":
    import fcntl
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
    if nattempts <= 1:
        return ""

    attempts = spack.util.string.plural(nattempts, "attempt")
    attempts = plural(nattempts, "attempt")
    return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)


@@ -11,8 +11,7 @@

from llnl.util import lang, tty

from spack.error import SpackError
from spack.util.path import system_path_filter
from ..path import system_path_filter

if sys.platform == "win32":
    from win32file import CreateHardLink
@@ -66,7 +65,9 @@ def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not
    if not allow_broken_symlinks:
        # Perform basic checks to make sure symlinking will succeed
        if os.path.lexists(link_path):
            raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
            raise AlreadyExistsError(
                f"Link path ({link_path}) already exists. Cannot create link."
            )

        if not os.path.exists(source_path):
            if os.path.isabs(source_path) and not allow_broken_symlinks:
@@ -78,7 +79,7 @@ def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not
        else:
            # os.symlink can create a link when the given source path is relative to
            # the link path. Emulate this behavior and check to see if the source exists
            # relative to the link patg ahead of link creation to prevent broken
            # relative to the link path ahead of link creation to prevent broken
            # links from being made.
            link_parent_dir = os.path.dirname(link_path)
            relative_path = os.path.join(link_parent_dir, source_path)
@@ -234,7 +235,7 @@ def _windows_create_junction(source: str, link: str):
    elif not os.path.exists(source):
        raise SymlinkError("Source path does not exist, cannot create a junction.")
    elif os.path.lexists(link):
        raise SymlinkError("Link path already exists, cannot create a junction.")
        raise AlreadyExistsError("Link path already exists, cannot create a junction.")
    elif not os.path.isdir(source):
        raise SymlinkError("Source path is not a directory, cannot create a junction.")

@@ -259,7 +260,7 @@ def _windows_create_hard_link(path: str, link: str):
    elif not os.path.exists(path):
        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
    elif os.path.lexists(link):
        raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
        raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
    elif not os.path.isfile(path):
        raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
    else:
@@ -336,7 +337,11 @@ def resolve_link_target_relative_to_the_link(link):
    return os.path.join(link_dir, target)


class SymlinkError(SpackError):
class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
    """


class AlreadyExistsError(SymlinkError):
    """Link path already exists."""
@@ -8,8 +8,8 @@
from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError


@@ -17,7 +17,9 @@ class ABI:
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(self, target, constraint):
    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined
@@ -34,7 +36,7 @@ def _gcc_get_libstdcxx_version(self, version):
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = CompilerSpec("gcc", version)
        spec = spack.spec.CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
@@ -77,16 +79,20 @@ def _gcc_compiler_compare(self, pversion, cversion):
            return False
        return plib == clib

    def _intel_compiler_compare(self, pversion, cversion):
    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        if len(pversion.version) < 2 or len(cversion.version) < 2:
            return False
        return pversion.version[:2] == cversion.version[:2]
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)

    def compiler_compatible(self, parent, child, **kwargs):
    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True
@@ -95,7 +101,7 @@ def compiler_compatible(self, parent, child, **kwargs):
            # Different compiler families are assumed ABI incompatible
            return False

        if kwargs.get("loose", False):
        if loose:
            return True

        # TODO: Can we move the specialized ABI matching stuff
@@ -116,9 +122,10 @@ def compiler_compatible(self, parent, child, **kwargs):
                return True
        return False

    def compatible(self, target, constraint, **kwargs):
    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Returns true if target spec is ABI compatible to constraint spec"""
        loosematch = kwargs.get("loose", False)
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loosematch
            target, constraint, loose=loose
        )
@@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls):
 import ast
 import collections
 import collections.abc
+import glob
 import inspect
 import itertools
+import pathlib
 import pickle
 import re
+import warnings
 from urllib.request import urlopen

 import llnl.util.lang
@@ -798,3 +801,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
         errors.append(err)

     return errors


+#: Sanity checks on package directives
+external_detection = AuditClass(
+    group="externals",
+    tag="PKG-EXTERNALS",
+    description="Sanity checks for external software detection",
+    kwargs=("pkgs",),
+)
+
+
+def packages_with_detection_tests():
+    """Return the list of packages with a corresponding detection_test.yaml file."""
+    import spack.config
+    import spack.util.path
+
+    to_be_tested = []
+    for current_repo in spack.repo.PATH.repos:
+        namespace = current_repo.namespace
+        packages_dir = pathlib.PurePath(current_repo.packages_path)
+        pattern = packages_dir / "**" / "detection_test.yaml"
+        pkgs_with_tests = [
+            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
+        ]
+        to_be_tested.extend(pkgs_with_tests)
+
+    return to_be_tested
+
+
+@external_detection
+def _test_detection_by_executable(pkgs, error_cls):
+    """Test drive external detection for packages"""
+    import spack.detection
+
+    errors = []
+
+    # Filter the packages and retain only the ones with detection tests
+    pkgs_with_tests = packages_with_detection_tests()
+    selected_pkgs = []
+    for current_package in pkgs_with_tests:
+        _, unqualified_name = spack.repo.partition_package_name(current_package)
+        # Check for both unqualified name and qualified name
+        if unqualified_name in pkgs or current_package in pkgs:
+            selected_pkgs.append(current_package)
+    selected_pkgs.sort()
+
+    if not selected_pkgs:
+        summary = "No detection test to run"
+        details = [f'  "{p}" has no detection test' for p in pkgs]
+        warnings.warn("\n".join([summary] + details))
+        return errors
+
+    for pkg_name in selected_pkgs:
+        for idx, test_runner in enumerate(
+            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
+        ):
+            specs = test_runner.execute()
+            expected_specs = test_runner.expected_specs
+
+            not_detected = set(expected_specs) - set(specs)
+            if not_detected:
+                summary = pkg_name + ": cannot detect some specs"
+                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
+                errors.append(error_cls(summary=summary, details=details))
+
+            not_expected = set(specs) - set(expected_specs)
+            if not_expected:
+                summary = pkg_name + ": detected unexpected specs"
+                msg = '"{0}" was detected, but was not expected [test_id={1}]'
+                details = [msg.format(s, idx) for s in sorted(not_expected)]
+                errors.append(error_cls(summary=summary, details=details))
+
+    return errors

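A sketch of how the new audit is meant to be driven, assuming a Spack checkout on sys.path (`run_group` is the pre-existing audit entrypoint, so its exact signature is an assumption here):

import spack.audit

# Packages that ship a detection_test.yaml, found by the new helper above
pkgs = spack.audit.packages_with_detection_tests()

# Run the new PKG-EXTERNALS group over them and print any failures
for tag, errors in spack.audit.run_group("externals", pkgs=pkgs):
    for error in errors:
        print(tag, str(error))
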
@@ -23,7 +23,7 @@
 import warnings
 from contextlib import closing, contextmanager
 from gzip import GzipFile
-from typing import List, NamedTuple, Optional, Union
+from typing import Dict, List, NamedTuple, Optional, Tuple, Union
 from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
@@ -34,6 +34,7 @@
 import spack.cmd
 import spack.config as config
 import spack.database as spack_db
+import spack.error
 import spack.hooks
 import spack.hooks.sbang
 import spack.mirror
@@ -215,11 +216,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             with self._index_file_cache.read_transaction(cache_key):
                 db._read_from_file(cache_path)
         except spack_db.InvalidDatabaseVersionError as e:
-            msg = (
+            tty.warn(
                 f"you need a newer Spack version to read the buildcache index for the "
                 f"following mirror: '{mirror_url}'. {e.database_version_message}"
             )
-            raise BuildcacheIndexError(msg) from e
+            return

         spec_list = db.query_local(installed=False, in_buildcache=True)
@@ -624,8 +625,7 @@ def buildinfo_file_name(prefix):
     """
     Filename of the binary package meta-data file
     """
-    name = os.path.join(prefix, ".spack/binary_distribution")
-    return name
+    return os.path.join(prefix, ".spack/binary_distribution")


 def read_buildinfo_file(prefix):
@@ -646,8 +646,7 @@ class BuildManifestVisitor(BaseDirectoryVisitor):
     directories."""

     def __init__(self):
-        # Save unique identifiers of files to avoid
-        # relocating hardlink files for each path.
+        # Save unique identifiers of hardlinks to avoid relocating them multiple times
         self.visited = set()

         # Lists of files we will check
@@ -656,6 +655,8 @@ def __init__(self):
     def seen_before(self, root, rel_path):
         stat_result = os.lstat(os.path.join(root, rel_path))
+        if stat_result.st_nlink == 1:
+            return False
         identifier = (stat_result.st_dev, stat_result.st_ino)
         if identifier in self.visited:
             return True
@@ -796,11 +797,7 @@ def tarball_directory_name(spec):
     Return name of the tarball directory according to the convention
         <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    return os.path.join(
-        str(spec.architecture),
-        f"{spec.compiler.name}-{spec.compiler.version}",
-        f"{spec.name}-{spec.version}",
-    )
+    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


 def tarball_name(spec, ext):
@@ -808,10 +805,10 @@ def tarball_name(spec, ext):
     Return the name of the tarfile according to the convention
         <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    return (
-        f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-"
-        f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}"
-    )
+    spec_formatted = spec.format_path(
+        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+    )
+    return f"{spec_formatted}{ext}"


 def tarball_path_name(spec, ext):
@@ -912,7 +909,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         index_json_path,
         url_util.join(cache_prefix, "index.json"),
         keep_original=False,
-        extra_args={"ContentType": "application/json"},
+        extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )

     # Push the hash
@@ -920,7 +917,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         index_hash_path,
         url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
-        extra_args={"ContentType": "text/plain"},
+        extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )

@@ -1156,57 +1153,99 @@ def gzip_compressed_tarfile(path):
         yield tar


-def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
-    # We only add files, symlinks, hardlinks, and directories
-    # No character devices, block devices and FIFOs should ever enter a tarball.
-    if tarinfo.isdev():
-        return None
-
-    # For distribution, it makes no sense to user/group data; since (a) they don't exist
-    # on other machines, and (b) they lead to surprises as `tar x` run as root will change
-    # ownership if it can. We want to extract as the current user. By setting owner to root,
-    # root will extract as root, and non-privileged user will extract as themselves.
-    tarinfo.uid = 0
-    tarinfo.gid = 0
-    tarinfo.uname = ""
-    tarinfo.gname = ""
-
-    # Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
-    # touched; as long as the content does not change, this ensures we get stable tarballs.
-    tarinfo.mtime = 0
-
-    # Normalize mode
-    if tarinfo.isfile() or tarinfo.islnk():
-        # If user can execute, use 0o755; else 0o644
-        # This is to avoid potentially unsafe world writable & exeutable files that may get
-        # extracted when Python or tar is run with privileges
-        tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
-    else:  # symbolic link and directories
-        tarinfo.mode = 0o755
-
-    return tarinfo
+def _tarinfo_name(p: str):
+    return p.lstrip("/")


-def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
-    # Serialize buildinfo for the tarball
-    bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
-    tarinfo = tarfile.TarInfo(name=path)
-    tarinfo.size = len(bstring)
-    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
+def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
+    """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
+    Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
+    Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
+    dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
+    Partitioning ensures only one dir is in memory at a time, and sorting improves compression.
+
+    Args:
+        tar: tarfile object to add files to
+        prefix: absolute install prefix of spec"""
+    if not os.path.isabs(prefix) or not os.path.isdir(prefix):
+        raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
+    hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
+    stat_key = lambda stat: (stat.st_dev, stat.st_ino)
+
+    try:  # skip buildinfo file if it exists
+        files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
+    except OSError:
+        files_to_skip = []
+
+    dir_stack = [prefix]
+    while dir_stack:
+        dir = dir_stack.pop()
+
+        # Add the dir before its contents
+        dir_info = tarfile.TarInfo(_tarinfo_name(dir))
+        dir_info.type = tarfile.DIRTYPE
+        dir_info.mode = 0o755
+        tar.addfile(dir_info)
+
+        # Sort by name: reproducible & improves compression
+        with os.scandir(dir) as it:
+            entries = sorted(it, key=lambda entry: entry.name)
+
+        new_dirs = []
+        for entry in entries:
+            if entry.is_dir(follow_symlinks=False):
+                new_dirs.append(entry.path)
+                continue
+
+            file_info = tarfile.TarInfo(_tarinfo_name(entry.path))
+
+            s = entry.stat(follow_symlinks=False)
+
+            # Skip existing binary distribution files.
+            id = stat_key(s)
+            if id in files_to_skip:
+                continue
+
+            # Normalize the mode
+            file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755
+
+            if entry.is_symlink():
+                file_info.type = tarfile.SYMTYPE
+                file_info.linkname = os.readlink(entry.path)
+                tar.addfile(file_info)
+
+            elif entry.is_file(follow_symlinks=False):
+                # Deduplicate hardlinks
+                if s.st_nlink > 1:
+                    if id in hardlink_to_tarinfo_name:
+                        file_info.type = tarfile.LNKTYPE
+                        file_info.linkname = hardlink_to_tarinfo_name[id]
+                        tar.addfile(file_info)
+                        continue
+                    hardlink_to_tarinfo_name[id] = file_info.name
+
+                # If file not yet seen, copy it.
+                file_info.type = tarfile.REGTYPE
+                file_info.size = s.st_size
+
+                with open(entry.path, "rb") as f:
+                    tar.addfile(file_info, f)
+
+        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical


-def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
-    """Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
-    if tarinfo.name.endswith("/.spack/binary_distribution"):
-        return None
-
-    return deterministic_tarinfo(tarinfo)
-
-
-def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
+def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
     with gzip_compressed_tarfile(tarfile_path) as tar:
-        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
-        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
+        # Tarball the install prefix
+        tarfile_of_spec_prefix(tar, binaries_dir)
+
+        # Serialize buildinfo for the tarball
+        bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
+        tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
+        tarinfo.type = tarfile.REGTYPE
+        tarinfo.size = len(bstring)
+        tarinfo.mode = 0o644
+        tar.addfile(tarinfo, io.BytesIO(bstring))

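The new streaming writer exists to make buildcache tarballs reproducible: entries are added in a fixed order, ownership, mtime and mode are normalized, and the surrounding gzip stream from `gzip_compressed_tarfile` is expected to zero out its header timestamp. A hedged sanity-check sketch (paths are placeholders, not from the diff):

import hashlib

import spack.binary_distribution as bd

def prefix_tarball_sha256(prefix: str, out: str) -> str:
    # Stream the prefix into a gzip-compressed tar, then hash the result
    with bd.gzip_compressed_tarfile(out) as tar:
        bd.tarfile_of_spec_prefix(tar, prefix)
    with open(out, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()

# Two archives of the same unchanged prefix should hash identically.
a = prefix_tarball_sha256("/opt/spack/zlib-1.2.13", "/tmp/a.tar.gz")
b = prefix_tarball_sha256("/opt/spack/zlib-1.2.13", "/tmp/b.tar.gz")
assert a == b
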
 class PushOptions(NamedTuple):
@@ -1278,14 +1317,12 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

-    pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-
     binaries_dir = spec.prefix

     # create info for later relocation and create tar
     buildinfo = get_buildinfo_dict(spec)

-    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
+    _do_create_tarball(tarfile_path, binaries_dir, buildinfo)

     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1417,7 +1454,7 @@ def try_fetch(url_to_fetch):

     try:
         stage.fetch()
-    except web_util.FetchError:
+    except spack.error.FetchError:
         stage.destroy()
         return None

@@ -1580,9 +1617,10 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         for rel_path in buildinfo[key]:
             stat_result = os.lstat(os.path.join(root, rel_path))
             identifier = (stat_result.st_dev, stat_result.st_ino)
-            if identifier in visited:
-                continue
-            visited.add(identifier)
+            if stat_result.st_nlink > 1:
+                if identifier in visited:
+                    continue
+                visited.add(identifier)
             new_list.append(rel_path)
         buildinfo[key] = new_list

@@ -2144,7 +2182,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         if not os.path.exists(stage.save_filename):
             try:
                 stage.fetch()
-            except web_util.FetchError:
+            except spack.error.FetchError:
                 continue

         tty.debug("Found key {0}".format(fingerprint))
@@ -2296,7 +2334,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
         try:
             stage.fetch()
             break
-        except web_util.FetchError as e:
+        except spack.error.FetchError as e:
             tty.debug(e)
     else:
         if fail_if_missing:

@@ -228,7 +228,7 @@ def _install_and_test(
         if not abstract_spec.intersects(candidate_spec):
             continue

-        if python_spec is not None and python_spec not in abstract_spec:
+        if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"):
             continue

         for _, pkg_hash, pkg_sha256 in item["binaries"]:
@@ -446,16 +446,11 @@ def ensure_executables_in_path_or_raise(
                 current_bootstrapper.last_search["spec"],
                 current_bootstrapper.last_search["command"],
             )
-            env_mods = spack.util.environment.EnvironmentModifications()
-            for dep in concrete_spec.traverse(
-                root=True, order="post", deptype=("link", "run")
-            ):
-                env_mods.extend(
-                    spack.user_environment.environment_modifications_for_spec(
-                        dep, set_package_py_globals=False
-                    )
-                )
-            cmd.add_default_envmod(env_mods)
+            cmd.add_default_envmod(
+                spack.user_environment.environment_modifications_for_specs(
+                    concrete_spec, set_package_py_globals=False
+                )
+            )
             return cmd

     assert exception_handler, (

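The bootstrap change above replaces a containment test with `Spec.intersects`, which asks whether two constraints can hold simultaneously rather than whether one is already satisfied by the other. A small illustration (the specs are hypothetical):

import spack.spec

candidate = spack.spec.Spec("py-pip@23")

# An abstract candidate rarely *contains* a python constraint yet, but it can
# still be compatible with one; intersects() captures exactly that.
print(candidate.intersects("^python@3.10"))  # True: nothing contradicts it
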
@@ -40,11 +40,15 @@
 import sys
 import traceback
 import types
+from collections import defaultdict
+from enum import Flag, auto
+from itertools import chain
+from typing import List, Tuple

 import llnl.util.tty as tty
+from llnl.string import plural
 from llnl.util.filesystem import join_path
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, stable_partition
 from llnl.util.symlink import symlink
 from llnl.util.tty.color import cescape, colorize
 from llnl.util.tty.log import MultiProcessFd
@@ -54,17 +58,21 @@
 import spack.build_systems.python
 import spack.builder
 import spack.config
+import spack.deptypes as dt
 import spack.main
 import spack.package_base
 import spack.paths
 import spack.platforms
 import spack.repo
 import spack.schema.environment
 import spack.spec
 import spack.store
 import spack.subprocess_context
+import spack.user_environment
 import spack.util.path
 import spack.util.pattern
+from spack import traverse
+from spack.context import Context
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import spack_install_test_log
 from spack.installer import InstallError
@@ -75,14 +83,12 @@
     env_flag,
     filter_system_paths,
     get_path,
     inspect_path,
     is_system_path,
     validate,
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
 from spack.util.module_cmd import load_module, module, path_from_modules
-from spack.util.string import plural

 #
 # This can be set by the user to globally disable parallel builds.
@@ -109,7 +115,6 @@
 SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
 SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"

-
 # Platform-specific library suffix.
 if sys.platform == "darwin":
     dso_suffix = "dylib"
@@ -406,19 +411,13 @@ def set_compiler_environment_variables(pkg, env):


 def set_wrapper_variables(pkg, env):
-    """Set environment variables used by the Spack compiler wrapper
-    (which have the prefix `SPACK_`) and also add the compiler wrappers
-    to PATH.
+    """Set environment variables used by the Spack compiler wrapper (which have the prefix
+    `SPACK_`) and also add the compiler wrappers to PATH.

-    This determines the injected -L/-I/-rpath options; each
-    of these specifies a search order and this function computes these
-    options in a manner that is intended to match the DAG traversal order
-    in `modifications_from_dependencies`: that method uses a post-order
-    traversal so that `PrependPath` actions from dependencies take lower
-    precedence; we use a post-order traversal here to match the visitation
-    order of `modifications_from_dependencies` (so we are visiting the
-    lowest priority packages first).
-    """
+    This determines the injected -L/-I/-rpath options; each of these specifies a search order and
+    this function computes these options in a manner that is intended to match the DAG traversal
+    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
+    is using topo order."""
     # Set environment variables if specified for
     # the given compiler
     compiler = pkg.compiler
@@ -537,45 +536,42 @@ def update_compiler_args_for_dep(dep):
     env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


-def set_module_variables_for_package(pkg):
+def set_package_py_globals(pkg, context: Context = Context.BUILD):
     """Populate the Python module of a package with some useful global names.
     This makes things easier for package writers.
     """
-    # Put a marker on this module so that it won't execute the body of this
-    # function again, since it is not needed
-    marker = "_set_run_already_called"
-    if getattr(pkg.module, marker, False):
-        return
-
     module = ModuleChangePropagator(pkg)

-    jobs = determine_number_of_jobs(parallel=pkg.parallel)
-
     m = module
-    m.make_jobs = jobs

-    # TODO: make these build deps that can be installed if not found.
-    m.make = MakeExecutable("make", jobs)
-    m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
-    # TODO: johnwparent: add package or builder support to define these build tools
-    # for now there is no entrypoint for builders to define these on their
-    # own
-    if sys.platform == "win32":
-        m.nmake = Executable("nmake")
-        m.msbuild = Executable("msbuild")
-        # analog to configure for win32
-        m.cscript = Executable("cscript")
+    if context == Context.BUILD:
+        jobs = determine_number_of_jobs(parallel=pkg.parallel)
+        m.make_jobs = jobs

-    # Find the configure script in the archive path
-    # Don't use which for this; we want to find it in the current dir.
-    m.configure = Executable("./configure")
+        # TODO: make these build deps that can be installed if not found.
+        m.make = MakeExecutable("make", jobs)
+        m.gmake = MakeExecutable("gmake", jobs)
+        m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
+        # TODO: johnwparent: add package or builder support to define these build tools
+        # for now there is no entrypoint for builders to define these on their
+        # own
+        if sys.platform == "win32":
+            m.nmake = Executable("nmake")
+            m.msbuild = Executable("msbuild")
+            # analog to configure for win32
+            m.cscript = Executable("cscript")

-    # Standard CMake arguments
-    m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
-    m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
-    m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
+        # Find the configure script in the archive path
+        # Don't use which for this; we want to find it in the current dir.
+        m.configure = Executable("./configure")

-    # Put spack compiler paths in module scope.
+        # Standard CMake arguments
+        m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
+        m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
+        m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

+    # Put spack compiler paths in module scope. (Some packages use it
+    # in setup_run_environment etc, so don't put it context == build)
     link_dir = spack.paths.build_env_path
     m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
     m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
@@ -599,9 +595,6 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):

     m.static_to_shared_library = static_to_shared_library

-    # Put a marker on this module so that it won't execute the body of this
-    # function again, since it is not needed
-    setattr(m, marker, True)
     module.propagate_changes_to_mro()

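With globals now gated on `Context.BUILD`, build-only helpers such as the newly added `gmake` are injected only while a build is being set up. A sketch of a package.py that relies on them (the package itself is hypothetical; `configure`, `gmake` and `parallel=` are the injected names from the hunk above):

from spack.package import *

class Libexample(Package):
    # Hypothetical package using the injected build-context globals

    def install(self, spec, prefix):
        configure("--prefix={0}".format(prefix))  # found in the stage dir
        gmake()                                   # new: gmake set like make
        gmake("install", parallel=False)
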
@@ -727,12 +720,15 @@ def load_external_modules(pkg):
         load_module(external_module)


-def setup_package(pkg, dirty, context="build"):
+def setup_package(pkg, dirty, context: Context = Context.BUILD):
     """Execute all environment setup routines."""
-    if context not in ["build", "test"]:
-        raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))
+    if context not in (Context.BUILD, Context.TEST):
+        raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}")

-    set_module_variables_for_package(pkg)
+    # First populate the package.py's module with the relevant globals that could be used in any
+    # of the setup_* functions.
+    setup_context = SetupContext(pkg.spec, context=context)
+    setup_context.set_all_package_py_globals()

     # Keep track of env changes from packages separately, since we want to
     # issue warnings when packages make "suspicious" modifications.
@@ -740,13 +736,15 @@ def setup_package(pkg, dirty, context="build"):
     env_mods = EnvironmentModifications()

     # setup compilers for build contexts
-    need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
+    need_compiler = context == Context.BUILD or (
+        context == Context.TEST and pkg.test_requires_compiler
+    )
     if need_compiler:
         set_compiler_environment_variables(pkg, env_mods)
         set_wrapper_variables(pkg, env_mods)

     tty.debug("setup_package: grabbing modifications from dependencies")
-    env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
+    env_mods.extend(setup_context.get_env_modifications())
     tty.debug("setup_package: collected all modifications from dependencies")

     # architecture specific setup
@@ -754,7 +752,7 @@ def setup_package(pkg, dirty, context="build"):
     target = platform.target(pkg.spec.architecture.target)
     platform.setup_platform_environment(pkg, env_mods)

-    if context == "build":
+    if context == Context.BUILD:
         tty.debug("setup_package: setup build environment for root")
         builder = spack.builder.create(pkg)
         builder.setup_build_environment(env_mods)
@@ -765,16 +763,7 @@ def setup_package(pkg, dirty, context="build"):
             "config to assume that the package is part of the system"
             " includes and omit it when invoked with '--cflags'."
         )
-    elif context == "test":
-        tty.debug("setup_package: setup test environment for root")
-        env_mods.extend(
-            inspect_path(
-                pkg.spec.prefix,
-                spack.user_environment.prefix_inspections(pkg.spec.platform),
-                exclude=is_system_path,
-            )
-        )
-        pkg.setup_run_environment(env_mods)
+    elif context == Context.TEST:
         env_mods.prepend_path("PATH", ".")

     # First apply the clean environment changes
@@ -813,158 +802,245 @@ def setup_package(pkg, dirty, context="build"):
     return env_base


-def _make_runnable(pkg, env):
-    # Helper method which prepends a Package's bin/ prefix to the PATH
-    # environment variable
-    prefix = pkg.prefix
+class EnvironmentVisitor:
+    def __init__(self, *roots: spack.spec.Spec, context: Context):
+        # For the roots (well, marked specs) we follow different edges
+        # than for their deps, depending on the context.
+        self.root_hashes = set(s.dag_hash() for s in roots)

-    for dirname in ["bin", "bin64"]:
-        bin_dir = os.path.join(prefix, dirname)
-        if os.path.isdir(bin_dir):
-            env.prepend_path("PATH", bin_dir)
+        if context == Context.BUILD:
+            # Drop direct run deps in build context
+            # We don't really distinguish between install and build time test deps,
+            # so we include them here as build-time test deps.
+            self.root_depflag = dt.BUILD | dt.TEST | dt.LINK
+        elif context == Context.TEST:
+            # This is more of an extended run environment
+            self.root_depflag = dt.TEST | dt.RUN | dt.LINK
+        elif context == Context.RUN:
+            self.root_depflag = dt.RUN | dt.LINK
+
+    def neighbors(self, item):
+        spec = item.edge.spec
+        if spec.dag_hash() in self.root_hashes:
+            depflag = self.root_depflag
+        else:
+            depflag = dt.LINK | dt.RUN
+        return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag))


-def modifications_from_dependencies(
-    spec, context, custom_mods_only=True, set_package_py_globals=True
-):
-    """Returns the environment modifications that are required by
-    the dependencies of a spec and also applies modifications
-    to this spec's package at module scope, if need be.
+class UseMode(Flag):
+    #: Entrypoint spec (a spec to be built; an env root, etc)
+    ROOT = auto()

-    Environment modifications include:
+    #: A spec used at runtime, but no executables in PATH
+    RUNTIME = auto()

-    - Updating PATH so that executables can be found
-    - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
-      tools can find Spack-built dependencies
-    - Running custom package environment modifications
+    #: A spec used at runtime, with executables in PATH
+    RUNTIME_EXECUTABLE = auto()

-    Custom package modifications can conflict with the default PATH changes
-    we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
-    environment variables), so this applies changes in a fixed order:
+    #: A spec that's a direct build or test dep
+    BUILDTIME_DIRECT = auto()

-    - All modifications (custom and default) from external deps first
-    - All modifications from non-external deps afterwards
+    #: A spec that should be visible in search paths in a build env.
+    BUILDTIME = auto()

-    With that order, `PrependPath` actions from non-external default
-    environment modifications will take precedence over custom modifications
-    from external packages.
+    #: Flag is set when the (node, mode) is finalized
+    ADDED = auto()

-    A secondary constraint is that custom and default modifications are
-    grouped on a per-package basis: combined with the post-order traversal this
-    means that default modifications of dependents can override custom
-    modifications of dependencies (again, this would only occur for PATH,
-    CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
-
-    Args:
-        spec (spack.spec.Spec): spec for which we want the modifications
-        context (str): either 'build' for build-time modifications or 'run'
-            for run-time modifications
-        custom_mods_only (bool): if True returns only custom modifications, if False
-            returns custom and default modifications
-        set_package_py_globals (bool): whether or not to set the global variables in the
-            package.py files (this may be problematic when using buildcaches that have
-            been built on a different but compatible OS)
-    """
-    if context not in ["build", "run", "test"]:
-        raise ValueError(
-            "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
-        )

+def effective_deptypes(
+    *specs: spack.spec.Spec, context: Context = Context.BUILD
+) -> List[Tuple[spack.spec.Spec, UseMode]]:
+    """Given a list of input specs and a context, return a list of tuples of
+    all specs that contribute to (environment) modifications, together with
+    a flag specifying in what way they do so. The list is ordered topologically
+    from root to leaf, meaning that environment modifications should be applied
+    in reverse so that dependents override dependencies, not the other way around."""
+    visitor = traverse.TopoVisitor(
+        EnvironmentVisitor(*specs, context=context),
+        key=lambda x: x.dag_hash(),
+        root=True,
+        all_edges=True,
+    )
+    traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
+
+    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
+    use_modes = defaultdict(lambda: UseMode(0))
+    nodes_with_type = []
+
+    for edge in visitor.edges:
+        parent, child, depflag = edge.parent, edge.spec, edge.depflag
+
+        # Mark the starting point
+        if parent is None:
+            use_modes[child] = UseMode.ROOT
+            continue
+
+        parent_mode = use_modes[parent]
+
+        # Nothing to propagate.
+        if not parent_mode:
+            continue
+
+        # Dependending on the context, include particular deps from the root.
+        if UseMode.ROOT & parent_mode:
+            if context == Context.BUILD:
+                if (dt.BUILD | dt.TEST) & depflag:
+                    use_modes[child] |= UseMode.BUILDTIME_DIRECT
+                if dt.LINK & depflag:
+                    use_modes[child] |= UseMode.BUILDTIME
+
+            elif context == Context.TEST:
+                if (dt.RUN | dt.TEST) & depflag:
+                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
+                elif dt.LINK & depflag:
+                    use_modes[child] |= UseMode.RUNTIME
+
+            elif context == Context.RUN:
+                if dt.RUN & depflag:
+                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
+                elif dt.LINK & depflag:
+                    use_modes[child] |= UseMode.RUNTIME
+
+        # Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps.
+        if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode:
+            if dt.LINK & depflag:
+                use_modes[child] |= UseMode.RUNTIME
+            if dt.RUN & depflag:
+                use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
+
+        # Propagate BUILDTIME through link deps.
+        if UseMode.BUILDTIME & parent_mode:
+            if dt.LINK & depflag:
+                use_modes[child] |= UseMode.BUILDTIME
+
+        # Finalize the spec; the invariant is that all in-edges are processed
+        # before out-edges, meaning that parent is done.
+        if not (UseMode.ADDED & parent_mode):
+            use_modes[parent] |= UseMode.ADDED
+            nodes_with_type.append((parent, parent_mode))
+
+    # Attach the leaf nodes, since we only added nodes with out-edges.
+    for spec, parent_mode in use_modes.items():
+        if parent_mode and not (UseMode.ADDED & parent_mode):
+            nodes_with_type.append((spec, parent_mode))
+
+    return nodes_with_type

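`effective_deptypes` is the heart of the new traversal: it tags every reachable node with how it is used in the given context. A hedged inspection sketch (the spec is illustrative):

import spack.spec
from spack.build_environment import effective_deptypes
from spack.context import Context

spec = spack.spec.Spec("cmake").concretized()
for node, mode in effective_deptypes(spec, context=Context.BUILD):
    # Topological root-to-leaf order; callers apply env modifications in
    # reverse so that dependents override dependencies.
    print(f"{node.name}: {mode}")
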
+class SetupContext:
+    """This class encapsulates the logic to determine environment modifications, and is used as
+    well to set globals in modules of package.py."""
+
+    def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
+        """Construct a ModificationsFromDag object.
+        Args:
+            specs: single root spec for build/test context, possibly more for run context
+            context: build, run, or test"""
+        if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1:
+            raise ValueError("Cannot setup build environment for multiple specs")
+        specs_with_type = effective_deptypes(*specs, context=context)
+
+        self.specs = specs
+        self.context = context
+        self.external: List[Tuple[spack.spec.Spec, UseMode]]
+        self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]]
+        # Reverse so we go from leaf to root
+        self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type)
+
+        # Split into non-external and external, maintaining topo order per group.
+        self.external, self.nonexternal = stable_partition(
+            reversed(specs_with_type), lambda t: t[0].external
+        )
+        self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
+        self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
+        self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT

-    env = EnvironmentModifications()
+        if context == Context.RUN or context == Context.TEST:
+            self.should_be_runnable |= UseMode.ROOT
+            self.should_setup_run_env |= UseMode.ROOT

-    # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
-    # function; these sets form the building blocks of those collections.
-    build_deps = set(spec.dependencies(deptype=("build", "test")))
-    link_deps = set(spec.traverse(root=False, deptype="link"))
-    build_link_deps = build_deps | link_deps
-    build_and_supporting_deps = set()
-    for build_dep in build_deps:
-        build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
-    run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
-    test_and_supporting_deps = set()
-    for test_dep in set(spec.dependencies(deptype="test")):
-        test_and_supporting_deps.update(test_dep.traverse(deptype="run"))
+        # Everything that calls setup_run_environment and setup_dependent_* needs globals set.
+        self.should_set_package_py_globals = (
+            self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
+        )
+        # In a build context, the root and direct build deps need build-specific globals set.
+        self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT

-    # All dependencies that might have environment modifications to apply
-    custom_mod_deps = set()
-    if context == "build":
-        custom_mod_deps.update(build_and_supporting_deps)
-        # Tests may be performed after build
-        custom_mod_deps.update(test_and_supporting_deps)
-    else:
-        # test/run context
-        custom_mod_deps.update(run_and_supporting_deps)
-        if context == "test":
-            custom_mod_deps.update(test_and_supporting_deps)
-    custom_mod_deps.update(link_deps)
+    def set_all_package_py_globals(self):
+        """Set the globals in modules of package.py files."""
+        for dspec, flag in chain(self.external, self.nonexternal):
+            pkg = dspec.package

-    # Determine 'exe_deps': the set of packages with binaries we want to use
-    if context == "build":
-        exe_deps = build_and_supporting_deps | test_and_supporting_deps
-    elif context == "run":
-        exe_deps = set(spec.traverse(deptype="run"))
-    elif context == "test":
-        exe_deps = test_and_supporting_deps
+            if self.should_set_package_py_globals & flag:
+                if self.context == Context.BUILD and self.needs_build_context & flag:
+                    set_package_py_globals(pkg, context=Context.BUILD)
+                else:
+                    # This includes runtime dependencies, also runtime deps of direct build deps.
+                    set_package_py_globals(pkg, context=Context.RUN)

-    def default_modifications_for_dep(dep):
-        if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
-            prefix = dep.prefix
+            for spec in dspec.dependents():
+                # Note: some specs have dependents that are unreachable from the root, so avoid
+                # setting globals for those.
+                if id(spec) not in self.nodes_in_subdag:
+                    continue
+                dependent_module = ModuleChangePropagator(spec.package)
+                pkg.setup_dependent_package(dependent_module, spec)
+                dependent_module.propagate_changes_to_mro()

-            env.prepend_path("CMAKE_PREFIX_PATH", prefix)
+    def get_env_modifications(self) -> EnvironmentModifications:
+        """Returns the environment variable modifications for the given input specs and context.
+        Environment modifications include:
+        - Updating PATH for packages that are required at runtime
+        - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
+          tools can find Spack-built dependencies (when context=build)
+        - Running custom package environment modifications (setup_run_environment,
+          setup_dependent_build_environment, setup_dependent_run_environment)

-            for directory in ("lib", "lib64", "share"):
-                pcdir = os.path.join(prefix, directory, "pkgconfig")
-                if os.path.isdir(pcdir):
-                    env.prepend_path("PKG_CONFIG_PATH", pcdir)
+        The (partial) order imposed on the specs is externals first, then topological
+        from leaf to root. That way externals cannot contribute search paths that would shadow
+        Spack's prefixes, and dependents override variables set by dependencies."""
+        env = EnvironmentModifications()
+        for dspec, flag in chain(self.external, self.nonexternal):
+            tty.debug(f"Adding env modifications for {dspec.name}")
+            pkg = dspec.package

-        if dep in exe_deps and not is_system_path(dep.prefix):
-            _make_runnable(dep, env)
+            if self.should_setup_dependent_build_env & flag:
+                self._make_buildtime_detectable(dspec, env)

-    def add_modifications_for_dep(dep):
-        tty.debug("Adding env modifications for {0}".format(dep.name))
-        # Some callers of this function only want the custom modifications.
-        # For callers that want both custom and default modifications, we want
-        # to perform the default modifications here (this groups custom
-        # and default modifications together on a per-package basis).
-        if not custom_mods_only:
-            default_modifications_for_dep(dep)
+                for spec in self.specs:
+                    builder = spack.builder.create(pkg)
+                    builder.setup_dependent_build_environment(env, spec)

-        # Perform custom modifications here (PrependPath actions performed in
-        # the custom method override the default environment modifications
-        # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
-        # PKG_CONFIG_PATH)
-        if dep in custom_mod_deps:
-            dpkg = dep.package
-            if set_package_py_globals:
-                set_module_variables_for_package(dpkg)
+            if self.should_be_runnable & flag:
+                self._make_runnable(dspec, env)

-            current_module = ModuleChangePropagator(spec.package)
-            dpkg.setup_dependent_package(current_module, spec)
-            current_module.propagate_changes_to_mro()
+            if self.should_setup_run_env & flag:
+                # TODO: remove setup_dependent_run_environment...
+                for spec in dspec.dependents(deptype=dt.RUN):
+                    if id(spec) in self.nodes_in_subdag:
+                        pkg.setup_dependent_run_environment(env, spec)
+                pkg.setup_run_environment(env)
+        return env

-            if context == "build":
-                builder = spack.builder.create(dpkg)
-                builder.setup_dependent_build_environment(env, spec)
-            else:
-                dpkg.setup_dependent_run_environment(env, spec)
-        tty.debug("Added env modifications for {0}".format(dep.name))
+    def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
+        if is_system_path(dep.prefix):
+            return

-    # Note that we want to perform environment modifications in a fixed order.
-    # The Spec.traverse method provides this: i.e. in addition to
-    # the post-order semantics, it also guarantees a fixed traversal order
-    # among dependencies which are not constrained by post-order semantics.
-    for dspec in spec.traverse(root=False, order="post"):
-        if dspec.external:
-            add_modifications_for_dep(dspec)
+        env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix)
+        for d in ("lib", "lib64", "share"):
+            pcdir = os.path.join(dep.prefix, d, "pkgconfig")
+            if os.path.isdir(pcdir):
+                env.prepend_path("PKG_CONFIG_PATH", pcdir)

-    for dspec in spec.traverse(root=False, order="post"):
-        # Default env modifications for non-external packages can override
-        # custom modifications of external packages (this can only occur
-        # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
-        if not dspec.external:
-            add_modifications_for_dep(dspec)
+    def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
+        if is_system_path(dep.prefix):
+            return

-    return env
+        for d in ("bin", "bin64"):
+            bin_dir = os.path.join(dep.prefix, d)
+            if os.path.isdir(bin_dir):
+                env.prepend_path("PATH", bin_dir)


 def get_cmake_prefix_path(pkg):
@@ -996,7 +1072,7 @@ def get_cmake_prefix_path(pkg):
 def _setup_pkg_and_run(
     serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
-    context = kwargs.get("context", "build")
+    context: str = kwargs.get("context", "build")

     try:
         # We are in the child process. Python sets sys.stdin to
@@ -1012,7 +1088,7 @@ def _setup_pkg_and_run(
         if not kwargs.get("fake", False):
             kwargs["unmodified_env"] = os.environ.copy()
             kwargs["env_modifications"] = setup_package(
-                pkg, dirty=kwargs.get("dirty", False), context=context
+                pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context)
            )
         return_value = function(pkg, kwargs)
         write_pipe.send(return_value)

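`SetupContext` bundles the two phases that `setup_package` above performs: set package.py globals first, then collect env modifications leaf-to-root with externals first. A minimal sketch of that protocol, mirroring the calls in `setup_package` (the spec is illustrative):

import spack.spec
from spack.build_environment import SetupContext
from spack.context import Context

spec = spack.spec.Spec("zlib").concretized()
ctx = SetupContext(spec, context=Context.BUILD)
ctx.set_all_package_py_globals()
env_mods = ctx.get_env_modifications()
env_mods.apply_modifications()  # mutates os.environ
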
@@ -46,6 +46,7 @@ class AutotoolsPackage(spack.package_base.PackageBase):
         depends_on("gnuconfig", type="build", when="target=ppc64le:")
         depends_on("gnuconfig", type="build", when="target=aarch64:")
         depends_on("gnuconfig", type="build", when="target=riscv64:")
+        depends_on("gmake", type="build")
         conflicts("platform=windows")

     def flags_to_build_system_args(self, flags):

@@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags):
         # We specify for each of them.
         if flags["ldflags"]:
             ldflags = " ".join(flags["ldflags"])
-            ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
-            # cmake has separate linker arguments for types of builds.
-            for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
-                self.cmake_flag_args.append(ld_string.format(type, ldflags))
+            self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}")
+            self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}")
+            self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}")

         # CMake has libs options separated by language. Apply ours to each.
         if flags["ldlibs"]:

@@ -9,7 +9,8 @@

 import spack.builder
 import spack.package_base
-from spack.directives import build_system, conflicts
+from spack.directives import build_system, conflicts, depends_on
+from spack.multimethod import when

 from ._checks import (
     BaseBuilder,
@@ -29,7 +30,10 @@ class MakefilePackage(spack.package_base.PackageBase):
     legacy_buildsystem = "makefile"

     build_system("makefile")
-    conflicts("platform=windows", when="build_system=makefile")
+
+    with when("build_system=makefile"):
+        conflicts("platform=windows")
+        depends_on("gmake", type="build")


 @spack.builder.builder("makefile")

@@ -10,7 +10,7 @@

 import spack.builder
 import spack.package_base
-from spack.directives import build_system, depends_on, variant
+from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when

 from ._checks import BaseBuilder, execute_build_time_tests
@@ -47,6 +47,13 @@ class MesonPackage(spack.package_base.PackageBase):
         variant("strip", default=False, description="Strip targets on install")
         depends_on("meson", type="build")
         depends_on("ninja", type="build")
+        # Python detection in meson requires distutils to be importable, but distutils no longer
+        # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
+        # because the distutils-precedence.pth startup file that setuptools ships with is not run
+        # when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson
+        # release, the distutils requirement will be dropped, so this conflict can be relaxed.
+        # We have patches to make it work with meson 1.1 and above.
+        conflicts("^python@3.12:", when="^meson@:1.0")

     def flags_to_build_system_args(self, flags):
         """Produces a list of all command line arguments to pass the specified

@@ -61,6 +61,11 @@ def component_prefix(self):
         """Path to component <prefix>/<component>/<version>."""
         return self.prefix.join(join_path(self.component_dir, self.spec.version))

+    @property
+    def env_script_args(self):
+        """Additional arguments to pass to vars.sh script."""
+        return ()
+
     def install(self, spec, prefix):
         self.install_component(basename(self.url_for_version(spec.version)))

@@ -124,7 +129,7 @@ def setup_run_environment(self, env):
         if "~envmods" not in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
-                    join_path(self.component_prefix, "env", "vars.sh")
+                    join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
                 )
             )

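The new `env_script_args` hook lets a component subclass forward extra arguments when `vars.sh` is sourced. A hypothetical override (the class name and argument are illustrative, not from the diff):

class IntelOneapiExampleComponent(IntelOneApiPackage):
    @property
    def env_script_args(self):
        # Forwarded verbatim to: source <component_prefix>/env/vars.sh <args...>
        return ("intel64",)
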
@@ -6,6 +6,7 @@
 import os
 import re
 import shutil
+import stat
 from typing import Optional

 import archspec
@@ -16,6 +17,7 @@

 import spack.builder
 import spack.config
+import spack.deptypes as dt
 import spack.detection
 import spack.multimethod
 import spack.package_base
@@ -24,6 +26,7 @@
 from spack.directives import build_system, depends_on, extends, maintainers
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
 from spack.install_test import test_part
+from spack.util.executable import Executable
 from spack.version import Version

 from ._checks import BaseBuilder, execute_install_time_tests
@@ -226,7 +229,48 @@ def update_external_dependencies(self, extendee_spec=None):

         python.external_path = self.spec.external_path
         python._mark_concrete()
-        self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
+        self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
+
+    def get_external_python_for_prefix(self):
+        """
+        For an external package that extends python, find the most likely spec for the python
+        it depends on.
+
+        First search: an "installed" external that shares a prefix with this package
+        Second search: a configured external that shares a prefix with this package
+        Third search: search this prefix for a python package
+
+        Returns:
+            spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
+        """
+        python_externals_installed = [
+            s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
+        ]
+        if python_externals_installed:
+            return python_externals_installed[0]
+
+        python_external_config = spack.config.get("packages:python:externals", [])
+        python_externals_configured = [
+            spack.spec.parse_with_version_concrete(item["spec"])
+            for item in python_external_config
+            if item["prefix"] == self.spec.external_path
+        ]
+        if python_externals_configured:
+            return python_externals_configured[0]
+
+        python_externals_detection = spack.detection.by_path(
+            ["python"], path_hints=[self.spec.external_path]
+        )
+
+        python_externals_detected = [
+            d.spec
+            for d in python_externals_detection.get("python", [])
+            if d.prefix == self.spec.external_path
+        ]
+        if python_externals_detected:
+            return python_externals_detected[0]
+
+        raise StopIteration("No external python could be detected for %s to depend on" % self.spec)


 class PythonPackage(PythonExtension):
@@ -273,54 +317,16 @@ def list_url(cls):
         name = cls.pypi.split("/")[0]
         return "https://pypi.org/simple/" + name + "/"

-    def get_external_python_for_prefix(self):
-        """
-        For an external package that extends python, find the most likely spec for the python
-        it depends on.
-
-        First search: an "installed" external that shares a prefix with this package
-        Second search: a configured external that shares a prefix with this package
-        Third search: search this prefix for a python package
-
-        Returns:
-            spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
-        """
-        python_externals_installed = [
-            s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
-        ]
-        if python_externals_installed:
-            return python_externals_installed[0]
-
-        python_external_config = spack.config.get("packages:python:externals", [])
-        python_externals_configured = [
-            spack.spec.parse_with_version_concrete(item["spec"])
-            for item in python_external_config
-            if item["prefix"] == self.spec.external_path
-        ]
-        if python_externals_configured:
-            return python_externals_configured[0]
-
-        python_externals_detection = spack.detection.by_path(
-            ["python"], path_hints=[self.spec.external_path]
-        )
-
-        python_externals_detected = [
-            d.spec
-            for d in python_externals_detection.get("python", [])
-            if d.prefix == self.spec.external_path
-        ]
-        if python_externals_detected:
-            return python_externals_detected[0]
-
-        raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
-
     @property
     def headers(self):
         """Discover header files in platlib."""

+        # Remove py- prefix in package name
+        name = self.spec.name[3:]
+
         # Headers may be in either location
-        include = self.prefix.join(self.spec["python"].package.include)
-        platlib = self.prefix.join(self.spec["python"].package.platlib)
+        include = self.prefix.join(self.spec["python"].package.include).join(name)
+        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
         headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)

         if headers:
@@ -334,18 +340,64 @@ def libs(self):
         """Discover libraries in platlib."""

         # Remove py- prefix in package name
-        library = "lib" + self.spec.name[3:].replace("-", "?")
-        root = self.prefix.join(self.spec["python"].package.platlib)
+        name = self.spec.name[3:]

-        for shared in [True, False]:
-            libs = fs.find_libraries(library, root, shared=shared, recursive=True)
-            if libs:
-                return libs
+        root = self.prefix.join(self.spec["python"].package.platlib).join(name)
+
+        libs = fs.find_all_libraries(root, recursive=True)
+
+        if libs:
+            return libs

         msg = "Unable to recursively locate {} libraries in {}"
         raise NoLibrariesError(msg.format(self.spec.name, root))


+def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes):
+    # Recurse into the install prefix and fixup shebangs
+    exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+    dirs = [path]
+    hardlinks = set()
+
+    while dirs:
+        with os.scandir(dirs.pop()) as entries:
+            for entry in entries:
+                if entry.is_dir(follow_symlinks=False):
+                    dirs.append(entry.path)
+                    continue
+
+                # Only consider files, not symlinks
+                if not entry.is_file(follow_symlinks=False):
+                    continue
+
+                lstat = entry.stat(follow_symlinks=False)
+
+                # Skip over files that are not executable
+                if not (lstat.st_mode & exe):
+                    continue
+
+                # Don't modify hardlinks more than once
+                if lstat.st_nlink > 1:
+                    key = (lstat.st_ino, lstat.st_dev)
+                    if key in hardlinks:
+                        continue
+                    hardlinks.add(key)
+
+                # Finally replace shebangs if any.
+                with open(entry.path, "rb+") as f:
+                    contents = f.read(2)
+                    if contents != b"#!":
+                        continue
+                    contents += f.read()
+
+                    if old_interpreter not in contents:
+                        continue
+
+                    f.seek(0)
+                    f.write(contents.replace(old_interpreter, new_interpreter))
+                    f.truncate()

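`fixup_shebangs` rewrites executable scripts in place, skipping non-scripts and deduplicating hardlinks. A hedged call sketch (all paths are placeholders):

from spack.build_systems.python import fixup_shebangs

fixup_shebangs(
    path="/opt/spack/py-example-1.0",
    old_interpreter=b"/tmp/stage/build_env/bin/python",
    new_interpreter=b"/opt/spack/python-3.11.4/bin/python3.11",
)
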
@spack.builder.builder("python_pip")
|
||||
class PythonPipBuilder(BaseBuilder):
|
||||
phases = ("install",)
|
||||
@@ -442,8 +494,36 @@ def global_options(self, spec, prefix):
|
||||
"""
|
||||
return []
|
||||
|
||||
@property
|
||||
def _build_venv_path(self):
|
||||
"""Return the path to the virtual environment used for building when
|
||||
python is external."""
|
||||
return os.path.join(self.spec.package.stage.path, "build_env")
|
||||
|
||||
@property
|
||||
def _build_venv_python(self) -> Executable:
|
||||
"""Return the Python executable in the build virtual environment when
|
||||
python is external."""
|
||||
return Executable(os.path.join(self._build_venv_path, "bin", "python"))
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install everything from build directory."""
|
||||
python: Executable = spec["python"].command
|
||||
# Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot
|
||||
# execute hooks from user and system site-packages.
|
||||
if spec["python"].external:
|
||||
# There are no environment variables to disable the system site-packages, so we use a
|
||||
# virtual environment instead. The downside of this approach is that pip produces
|
||||
# incorrect shebangs that refer to the virtual environment, which we have to fix up.
|
||||
python("-m", "venv", "--without-pip", self._build_venv_path)
|
||||
pip = self._build_venv_python
|
||||
else:
|
||||
# For a Spack managed Python, system site-packages is empty/unused by design, so it
|
||||
# suffices to disable user site-packages, for which there is an environment variable.
|
||||
pip = python
|
||||
pip.add_default_env("PYTHONNOUSERSITE", "1")
|
||||
pip.add_default_arg("-m")
|
||||
pip.add_default_arg("pip")
|
||||
|
||||
args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
|
||||
|
||||
@@ -467,8 +547,31 @@ def install(self, pkg, spec, prefix):
|
||||
else:
|
||||
args.append(".")
|
||||
|
||||
pip = inspect.getmodule(pkg).pip
|
||||
with fs.working_dir(self.build_directory):
|
||||
pip(*args)
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
def fixup_shebangs_pointing_to_build(self):
|
||||
"""When installing a package using an external python, we use a temporary virtual
|
||||
environment which improves build isolation. The downside is that pip produces shebangs
|
||||
that point to the temporary virtual environment. This method fixes them up to point to the
|
||||
underlying Python."""
|
||||
# No need to fixup shebangs if no build venv was used. (this post install function also
|
||||
# runs when install was overridden in another package, so check existence of the venv path)
|
||||
if not os.path.exists(self._build_venv_path):
|
||||
return
|
||||
|
||||
# Use sys.executable, since that's what pip uses.
|
||||
interpreter = (
|
||||
lambda python: python("-c", "import sys; print(sys.executable)", output=str)
|
||||
.strip()
|
||||
.encode("utf-8")
|
||||
)
|
||||
|
||||
fixup_shebangs(
|
||||
path=self.spec.prefix,
|
||||
old_interpreter=interpreter(self._build_venv_python),
|
||||
new_interpreter=interpreter(self.spec["python"].command),
|
||||
)
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
|
||||
@@ -64,7 +64,7 @@ class RacketBuilder(spack.builder.Builder):
|
||||
|
||||
@property
|
||||
def subdirectory(self):
|
||||
if self.racket_name:
|
||||
if self.pkg.racket_name:
|
||||
return "pkgs/{0}".format(self.pkg.racket_name)
|
||||
return None
|
||||
|
||||
|
||||
@@ -49,7 +49,11 @@
|
||||
|
||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
# TODO: Remove this in Spack 0.23
|
||||
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
|
||||
JOB_NAME_FORMAT = (
|
||||
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
|
||||
)
|
||||
|
||||
spack_gpg = spack.main.SpackCommand("gpg")
|
||||
spack_compiler = spack.main.SpackCommand("compiler")
|
||||
@@ -69,48 +73,23 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
return False
|
||||
|
||||
|
||||
def get_job_name(spec, osarch, build_group):
|
||||
"""Given the necessary parts, format the gitlab job name
|
||||
def get_job_name(spec: spack.spec.Spec, build_group: str = ""):
|
||||
"""Given a spec and possibly a build group, return the job name. If the
|
||||
resulting name is longer than 255 characters, it will be truncated.
|
||||
|
||||
Arguments:
|
||||
spec (spack.spec.Spec): Spec job will build
|
||||
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
|
||||
but sphinx doesn't recognize the type and fails).
|
||||
build_group (str): Name of build group this job belongs to (a CDash
|
||||
notion)
|
||||
|
||||
Returns: The job name
|
||||
"""
|
||||
item_idx = 0
|
||||
format_str = ""
|
||||
format_args = []
|
||||
|
||||
format_str += "{{{0}}}".format(item_idx)
|
||||
format_args.append(spec.name)
|
||||
item_idx += 1
|
||||
|
||||
format_str += "/{{{0}}}".format(item_idx)
|
||||
format_args.append(spec.dag_hash(7))
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.version)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(osarch)
|
||||
item_idx += 1
|
||||
job_name = spec.format(JOB_NAME_FORMAT)
|
||||
|
||||
if build_group:
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(build_group)
|
||||
item_idx += 1
|
||||
job_name = "{0} {1}".format(job_name, build_group)
|
||||
|
||||
return format_str.format(*format_args)
|
||||
return job_name[:255]
|
||||
|
||||
|
||||
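The rewritten get_job_name replaces manual index-based string assembly with a single spec.format() call over JOB_NAME_FORMAT, then truncates to 255 characters, evidently to stay within the CI system's job-name length limit. A condensed illustration of the truncation behavior with plain strings (the spec text is hypothetical):

    def job_name(formatted_spec: str, build_group: str = "") -> str:
        name = formatted_spec
        if build_group:
            name = "{0} {1}".format(name, build_group)
        return name[:255]  # keep within the CI job-name limit

    print(job_name("zlib@1.2.13 /abcdefg %gcc@12.2.0 arch=linux-ubuntu22.04-x86_64", "My CDash group"))
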
 def _remove_reserved_tags(tags):

@@ -308,7 +287,7 @@ def append_dep(s, d):
         dependencies.append({"spec": s, "depends": d})

     for spec in spec_list:
-        for s in spec.traverse(deptype=all):
+        for s in spec.traverse(deptype="all"):
             if s.external:
                 tty.msg("Will not stage external pkg: {0}".format(s))
                 continue

@@ -316,7 +295,7 @@ def append_dep(s, d):
             skey = _spec_deps_key(s)
             spec_labels[skey] = s

-            for d in s.dependencies(deptype=all):
+            for d in s.dependencies(deptype="all"):
                 dkey = _spec_deps_key(d)
                 if d.external:
                     tty.msg("Will not stage external dep: {0}".format(d))

@@ -337,7 +316,7 @@ def _spec_matches(spec, match_string):


 def _format_job_needs(
-    dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
+    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
 ):
     needs_list = []
     for dep_job in dep_jobs:

@@ -347,7 +326,7 @@ def _format_job_needs(
         if not prune_dag or rebuild:
             needs_list.append(
                 {
-                    "job": get_job_name(dep_job, dep_job.architecture, build_group),
+                    "job": get_job_name(dep_job, build_group),
                     "artifacts": enable_artifacts_buildcache,
                 }
             )

@@ -700,7 +679,7 @@ def generate_gitlab_ci_yaml(
         remote_mirror_override (str): Typically only needed when one spack.yaml
             is used to populate several mirrors with binaries, based on some
             criteria. Spack protected pipelines populate different mirrors based
-            on branch name, facilitated by this option.
+            on branch name, facilitated by this option. DEPRECATED
     """
     with spack.concretize.disable_compiler_existence_check():
         with env.write_transaction():

@@ -797,17 +776,39 @@ def generate_gitlab_ci_yaml(
                 "instead.",
             )

-    if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
-        tty.die("spack ci generate requires an env containing a mirror")
+    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
+    deprecated_mirror_config = False
+    buildcache_destination = None
+    if "buildcache-destination" in pipeline_mirrors:
+        if remote_mirror_override:
+            tty.die(
+                "Using the deprecated --buildcache-destination cli option and "
+                "having a mirror named 'buildcache-destination' at the same time "
+                "is not allowed"
+            )
+        buildcache_destination = pipeline_mirrors["buildcache-destination"]
+    else:
+        deprecated_mirror_config = True
+        # TODO: This will be an error in Spack 0.23

-    ci_mirrors = yaml_root["mirrors"]
-    mirror_urls = [url for url in ci_mirrors.values()]
-    remote_mirror_url = mirror_urls[0]
+    # TODO: Remove this block in spack 0.23
+    remote_mirror_url = None
+    if deprecated_mirror_config:
+        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
+            tty.die("spack ci generate requires an env containing a mirror")
+
+        ci_mirrors = yaml_root["mirrors"]
+        mirror_urls = [url for url in ci_mirrors.values()]
+        remote_mirror_url = mirror_urls[0]

     spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
     if spack_buildcache_copy:
         buildcache_copies = {}
-        buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
+        buildcache_copy_src_prefix = (
+            buildcache_destination.fetch_url
+            if buildcache_destination
+            else remote_mirror_override or remote_mirror_url
+        )
         buildcache_copy_dest_prefix = spack_buildcache_copy

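Across these hunks the destination selection follows one precedence rule: a mirror explicitly named "buildcache-destination" wins, then the deprecated override URL, then the first mirror from the environment. A minimal sketch of just that rule; the function and argument names are illustrative:

    from typing import Optional

    def select_destination(
        buildcache_destination_url: Optional[str],
        remote_mirror_override: Optional[str],
        remote_mirror_url: Optional[str],
    ) -> Optional[str]:
        # The named mirror wins; otherwise fall back to the deprecated settings.
        if buildcache_destination_url:
            return buildcache_destination_url
        return remote_mirror_override or remote_mirror_url
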
     # Check for a list of "known broken" specs that we should not bother

@@ -819,6 +820,7 @@ def generate_gitlab_ci_yaml(

     enable_artifacts_buildcache = False
     if "enable-artifacts-buildcache" in ci_config:
+        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
         enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

     rebuild_index_enabled = True

@@ -827,13 +829,15 @@ def generate_gitlab_ci_yaml(

     temp_storage_url_prefix = None
     if "temporary-storage-url-prefix" in ci_config:
+        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
         temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

     # If a remote mirror override (alternate buildcache destination) was
     # specified, add it here in case it has already built hashes we might
     # generate.
+    # TODO: Remove this block in Spack 0.23
     mirrors_to_check = None
-    if remote_mirror_override:
+    if deprecated_mirror_config and remote_mirror_override:
         if spack_pipeline_type == "spack_protected_branch":
             # Overriding the main mirror in this case might result
             # in skipping jobs on a release pipeline because specs are

@@ -853,8 +857,9 @@ def generate_gitlab_ci_yaml(
                 cfg.default_modify_scope(),
             )

+    # TODO: Remove this block in Spack 0.23
     shared_pr_mirror = None
-    if spack_pipeline_type == "spack_pull_request":
+    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
         stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
         shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
         spack.mirror.add(

@@ -906,6 +911,7 @@ def generate_gitlab_ci_yaml(
     job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
     job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
     job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
+    # TODO: Remove this line in Spack 0.23
     local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
     user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

@@ -920,11 +926,11 @@ def generate_gitlab_ci_yaml(
     rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
     rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
     rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
+    # TODO: Remove this line in Spack 0.23
     rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
     rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

     # Speed up staging by first fetching binary indices from all mirrors
-    # (including the override mirror we may have just added above).
     try:
         bindist.binary_index.update()
     except bindist.FetchCacheError as e:

@@ -1023,19 +1029,23 @@ def main_script_replacements(cmd):
             if "after_script" in job_object:
                 job_object["after_script"] = _unpack_script(job_object["after_script"])

-            osname = str(release_spec.architecture)
-            job_name = get_job_name(release_spec, osname, build_group)
+            job_name = get_job_name(release_spec, build_group)

             job_vars = job_object.setdefault("variables", {})
             job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
             job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
+            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
+            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
+            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
+            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
+            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

             job_object["needs"] = []
             if spec_label in dependencies:
                 if enable_artifacts_buildcache:
                     # Get dependencies transitively, so they're all
                     # available in the artifacts buildcache.
-                    dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
+                    dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
                 else:
                     # In this case, "needs" is only used for scheduling
                     # purposes, so we only get the direct dependencies.

@@ -1046,7 +1056,6 @@ def main_script_replacements(cmd):
                 job_object["needs"].extend(
                     _format_job_needs(
                         dep_jobs,
-                        osname,
                         build_group,
                         prune_dag,
                         rebuild_decisions,

@@ -1132,6 +1141,7 @@ def main_script_replacements(cmd):
                 },
             )

+            # TODO: Remove this block in Spack 0.23
             if enable_artifacts_buildcache:
                 bc_root = os.path.join(local_mirror_dir, "build_cache")
                 job_object["artifacts"]["paths"].extend(

@@ -1161,10 +1171,12 @@ def main_script_replacements(cmd):
         _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

     # Clean up remote mirror override if enabled
-    if remote_mirror_override:
-        spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
-    if spack_pipeline_type == "spack_pull_request":
-        spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
+    # TODO: Remove this block in Spack 0.23
+    if deprecated_mirror_config:
+        if remote_mirror_override:
+            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
+        if spack_pipeline_type == "spack_pull_request":
+            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

     tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))

@@ -1195,10 +1207,28 @@ def main_script_replacements(cmd):
         sync_job["needs"] = [
             {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
         ]

+        if "variables" not in sync_job:
+            sync_job["variables"] = {}
+
+        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
+            buildcache_destination.fetch_url
+            if buildcache_destination
+            else remote_mirror_override or remote_mirror_url
+        )
+
+        if "buildcache-source" in pipeline_mirrors:
+            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
+        else:
+            # TODO: Remove this condition in Spack 0.23
+            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
+        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
+
         output_object["copy"] = sync_job
         job_id += 1

     if job_id > 0:
+        # TODO: Remove this block in Spack 0.23
         if temp_storage_url_prefix:
             # There were some rebuild jobs scheduled, so we will need to
             # schedule a job to clean up the temporary storage location

@@ -1232,6 +1262,13 @@ def main_script_replacements(cmd):
         signing_job["when"] = "always"
         signing_job["retry"] = {"max": 2, "when": ["always"]}
         signing_job["interruptible"] = True
+        if "variables" not in signing_job:
+            signing_job["variables"] = {}
+        signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
+            buildcache_destination.push_url  # need the s3 url for aws s3 sync
+            if buildcache_destination
+            else remote_mirror_override or remote_mirror_url
+        )

         output_object["sign-pkgs"] = signing_job

@@ -1240,13 +1277,13 @@ def main_script_replacements(cmd):
         stage_names.append("stage-rebuild-index")
         final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

-        index_target_mirror = mirror_urls[0]
-        if remote_mirror_override:
-            index_target_mirror = remote_mirror_override
         final_job["stage"] = "stage-rebuild-index"
+        target_mirror = remote_mirror_override or remote_mirror_url
+        if buildcache_destination:
+            target_mirror = buildcache_destination.push_url
         final_job["script"] = _unpack_script(
             final_job["script"],
-            op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
+            op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
         )

         final_job["when"] = "always"

@@ -1268,20 +1305,24 @@ def main_script_replacements(cmd):
         "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
         "SPACK_VERSION": spack_version,
         "SPACK_CHECKOUT_VERSION": version_to_clone,
+        # TODO: Remove this line in Spack 0.23
         "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
         "SPACK_JOB_LOG_DIR": rel_job_log_dir,
         "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
         "SPACK_JOB_TEST_DIR": rel_job_test_dir,
+        # TODO: Remove this line in Spack 0.23
        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
         "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
         "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
+        # TODO: Remove this line in Spack 0.23
         "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
         "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
         "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
+        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
     }

-    if remote_mirror_override:
+    # TODO: Remove this block in Spack 0.23
+    if deprecated_mirror_config and remote_mirror_override:
         (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override

     spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)

@@ -2021,43 +2062,23 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):


 def create_buildcache(
-    input_spec: spack.spec.Spec,
-    *,
-    pipeline_mirror_url: Optional[str] = None,
-    buildcache_mirror_url: Optional[str] = None,
-    sign_binaries: bool = False,
+    input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False
 ) -> List[PushResult]:
     """Create the buildcache at the provided mirror(s).

     Arguments:
         input_spec: Installed spec to package and push
-        buildcache_mirror_url: URL for the buildcache mirror
-        pipeline_mirror_url: URL for the pipeline mirror
+        destination_mirror_urls: List of urls to push to
         sign_binaries: Whether or not to sign buildcache entry

     Returns: A list of PushResults, indicating success or failure.
     """
     results = []

-    # Create buildcache in either the main remote mirror, or in the
-    # per-PR mirror, if this is a PR pipeline
-    if buildcache_mirror_url:
+    for mirror_url in destination_mirror_urls:
         results.append(
             PushResult(
-                success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries),
-                url=buildcache_mirror_url,
-            )
-        )
-
-    # Create another copy of that buildcache in the per-pipeline
-    # temporary storage mirror (this is only done if either
-    # artifacts buildcache is enabled or a temporary storage url
-    # prefix is set)
-    if pipeline_mirror_url:
-        results.append(
-            PushResult(
-                success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries),
-                url=pipeline_mirror_url,
+                success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
             )
         )

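create_buildcache now takes a flat list of destination URLs instead of two special-cased keyword arguments, and returns one result per push. A hedged sketch of that pattern; PushResult and the push callable stand in for the real Spack helpers:

    from typing import Callable, List, NamedTuple

    class PushResult(NamedTuple):
        success: bool
        url: str

    def push_to_mirrors(urls: List[str], push: Callable[[str], bool]) -> List[PushResult]:
        # One result per destination; callers decide how to report failures.
        return [PushResult(success=push(url), url=url) for url in urls]

    # Example usage: results = push_to_mirrors(["s3://a", "s3://b"], lambda url: True)
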
@@ -11,6 +11,7 @@
 from textwrap import dedent
 from typing import List, Match, Tuple

+import llnl.string
 import llnl.util.tty as tty
 from llnl.util.filesystem import join_path
 from llnl.util.lang import attr_setdefault, index_by

@@ -29,7 +30,6 @@
 import spack.user_environment as uenv
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
-import spack.util.string

 # cmd has a submodule called "list" so preserve the python list module
 python_list = list

@@ -516,7 +516,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
     category, e.g. if pkg_type is "installed" then the message
     would be "3 installed packages"
     """
-    tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))


 def spack_is_git_repo():

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.tty as tty
 import llnl.util.tty.colify
 import llnl.util.tty.color as cl

 import spack.audit

@@ -20,6 +21,15 @@ def setup_parser(subparser):
     # Audit configuration files
     sp.add_parser("configs", help="audit configuration files")

+    # Audit package recipes
+    external_parser = sp.add_parser("externals", help="check external detection in packages")
+    external_parser.add_argument(
+        "--list",
+        action="store_true",
+        dest="list_externals",
+        help="if passed, list which packages have detection tests",
+    )
+
     # Https and other linting
     https_parser = sp.add_parser("packages-https", help="check https in packages")
     https_parser.add_argument(

@@ -29,7 +39,7 @@ def setup_parser(subparser):
     # Audit package recipes
     pkg_parser = sp.add_parser("packages", help="audit package recipes")

-    for group in [pkg_parser, https_parser]:
+    for group in [pkg_parser, https_parser, external_parser]:
         group.add_argument(
             "name",
             metavar="PKG",

@@ -62,6 +72,18 @@ def packages_https(parser, args):
     _process_reports(reports)


+def externals(parser, args):
+    if args.list_externals:
+        msg = "@*{The following packages have detection tests:}"
+        tty.msg(cl.colorize(msg))
+        llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
+        return
+
+    pkgs = args.name or spack.repo.PATH.all_package_names()
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    _process_reports(reports)
+
+
 def list(parser, args):
     for subcommand, check_tags in spack.audit.GROUPS.items():
         print(cl.colorize("@*b{" + subcommand + "}:"))

@@ -78,6 +100,7 @@ def list(parser, args):
 def audit(parser, args):
     subcommands = {
         "configs": configs,
+        "externals": externals,
         "packages": packages,
         "packages-https": packages_https,
         "list": list,

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.cmd.common.env_utility as env_utility
+from spack.context import Context

 description = (
     "run a command in a spec's install environment, or dump its environment to screen or file"

@@ -14,4 +15,4 @@


 def build_env(parser, args):
-    env_utility.emulate_env_utility("build-env", "build", args)
+    env_utility.emulate_env_utility("build-env", Context.BUILD, args)

@@ -13,6 +13,7 @@

 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
+from llnl.string import plural
 from llnl.util.lang import elide_list

 import spack.binary_distribution as bindist

@@ -32,7 +33,6 @@
 from spack.cmd import display_specs
 from spack.spec import Spec, save_dependency_specfiles
 from spack.stage import Stage
-from spack.util.string import plural

 description = "create, download and install binary packages"
 section = "packaging"

@@ -268,7 +268,7 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
     return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]


-def push_fn(args):
+def push_fn(args: argparse.Namespace):
     """create a binary package and push it to a mirror"""
     if args.spec_file:
         tty.warn(

@@ -414,7 +414,7 @@ def preview_fn(args):
     )


-def check_fn(args):
+def check_fn(args: argparse.Namespace):
     """check specs against remote binary mirror(s) to see if any need to be rebuilt

     this command uses the process exit code to indicate its result, specifically, if the

@@ -429,7 +429,7 @@ def check_fn(args):
     specs = spack.cmd.parse_specs(args.spec or args.spec_file)

     if specs:
-        specs = _matching_specs(specs, specs)
+        specs = _matching_specs(specs)
     else:
         specs = spack.cmd.require_active_env("buildcache check").all_specs()

@@ -527,7 +527,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
             temp_stage.create()
             temp_stage.fetch()
             web_util.push_to_url(local_path, dest_url, keep_original=True)
-        except web_util.FetchError as e:
+        except spack.error.FetchError as e:
             # Expected, since we have to try all the possible extensions
             tty.debug("no such file: {0}".format(src_url))
             tty.debug(e)

@@ -7,6 +7,7 @@
 import re
 import sys

+import llnl.string
 import llnl.util.lang
 from llnl.util import tty

@@ -15,6 +16,7 @@
 import spack.spec
 import spack.stage
 import spack.util.crypto
+import spack.util.web as web_util
 from spack.cmd.common import arguments
 from spack.package_base import PackageBase, deprecated_version, preferred_version
 from spack.util.editor import editor

@@ -66,7 +68,7 @@ def setup_parser(subparser):
     modes_parser.add_argument(
         "--verify", action="store_true", default=False, help="verify known package checksums"
     )
-    arguments.add_common_arguments(subparser, ["package"])
+    arguments.add_common_arguments(subparser, ["package", "jobs"])
     subparser.add_argument(
         "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
     )

@@ -96,7 +98,7 @@ def checksum(parser, args):

     # Add latest version if requested
     if args.latest:
-        remote_versions = pkg.fetch_remote_versions()
+        remote_versions = pkg.fetch_remote_versions(args.jobs)
         if len(remote_versions) > 0:
             latest_version = sorted(remote_versions.keys(), reverse=True)[0]
             versions.append(latest_version)

@@ -119,27 +121,47 @@ def checksum(parser, args):
             # if we get here, it's because no valid url was provided by the package
             # do expensive fallback to try to recover
             if remote_versions is None:
-                remote_versions = pkg.fetch_remote_versions()
+                remote_versions = pkg.fetch_remote_versions(args.jobs)
             if version in remote_versions:
                 url_dict[version] = remote_versions[version]

     if len(versions) <= 0:
         if remote_versions is None:
-            remote_versions = pkg.fetch_remote_versions()
+            remote_versions = pkg.fetch_remote_versions(args.jobs)
         url_dict = remote_versions

+    # A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.
+    # For example, GitHub release pages sometimes have multiple tarballs with different shasum:
+    # - releases/download/1.0/<pkg>-1.0.tar.gz (uploaded tarball)
+    # - archive/refs/tags/1.0.tar.gz (generated tarball)
+    # We wanna ensure that `spack checksum` and `spack install` ultimately use the same URL, so
+    # here we check whether the crawled and computed URLs disagree, and if so, prioritize the
+    # former if that URL exists (just sending a HEAD request that is).
+    url_changed_for_version = set()
+    for version, url in url_dict.items():
+        possible_urls = pkg.all_urls_for_version(version)
+        if url not in possible_urls:
+            for possible_url in possible_urls:
+                if web_util.url_exists(possible_url):
+                    url_dict[version] = possible_url
+                    break
+            else:
+                url_changed_for_version.add(version)
+
     if not url_dict:
         tty.die(f"Could not find any remote versions for {pkg.name}")
-
-    # print an empty line to create a new output section block
-    print()
+    elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
+        filtered_url_dict = spack.stage.interactive_version_filter(
+            url_dict, pkg.versions, url_changes=url_changed_for_version
+        )
+        if not filtered_url_dict:
+            exit(0)
+        url_dict = filtered_url_dict
+    else:
+        tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}")

     version_hashes = spack.stage.get_checksums_for_versions(
-        url_dict,
-        pkg.name,
-        keep_stage=args.keep_stage,
-        batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
-        fetch_options=pkg.fetch_options,
+        url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options
     )

     if args.verify:
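The comment in the hunk above describes the reconciliation step: when the spidered URL disagrees with the package's computed URLs, the computed URL is preferred if a HEAD request shows it exists. A standalone sketch of that check with the standard library; the helper names and the urllib-based probe are assumptions, not Spack's web utilities:

    from typing import Dict, Iterable, Set, Tuple
    import urllib.request

    def head_exists(url: str, timeout: float = 10.0) -> bool:
        # Cheap existence probe: HEAD request, any non-error status counts.
        req = urllib.request.Request(url, method="HEAD")
        try:
            with urllib.request.urlopen(req, timeout=timeout) as resp:
                return resp.status < 400
        except Exception:
            return False

    def prefer_computed_urls(
        crawled: Dict[str, str], computed: Dict[str, Iterable[str]]
    ) -> Tuple[Dict[str, str], Set[str]]:
        changed: Set[str] = set()
        for version, url in crawled.items():
            candidates = list(computed.get(version, []))
            if url in candidates:
                continue  # crawled and computed already agree
            for candidate in candidates:
                if head_exists(candidate):
                    crawled[version] = candidate
                    break
            else:
                changed.add(version)  # keep crawled URL, but flag the mismatch
        return crawled, changed
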
@@ -239,7 +261,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
             parsed_version = Version(contents_version.group(1))

             if parsed_version < new_versions[0][0]:
-                split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
+                split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
                 num_versions_added += 1

             elif parsed_version == new_versions[0][0]:

@@ -191,6 +191,14 @@ def ci_generate(args):
     """
     env = spack.cmd.require_active_env(cmd_name="ci generate")

+    if args.copy_to:
+        tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
+
+    if args.buildcache_destination:
+        tty.warn(
+            "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
+        )
+
     output_file = args.output_file
     copy_yaml_to = args.copy_to
     run_optimizer = args.optimize

@@ -264,12 +272,6 @@ def ci_rebuild(args):
     if not ci_config:
         tty.die("spack ci rebuild requires an env containing ci cfg")

-    tty.msg(
-        "SPACK_BUILDCACHE_DESTINATION={0}".format(
-            os.environ.get("SPACK_BUILDCACHE_DESTINATION", None)
-        )
-    )
-
     # Grab the environment variables we need. These either come from the
     # pipeline generation step ("spack ci generate"), where they were written
     # out as variables, or else provided by GitLab itself.

@@ -277,6 +279,7 @@ def ci_rebuild(args):
     job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
     job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
     repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
+    # TODO: Remove this in Spack 0.23
     local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
     concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
     ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")

@@ -285,9 +288,12 @@ def ci_rebuild(args):
     job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
     job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
+    # TODO: Remove this in Spack 0.23
     remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
+    # TODO: Remove this in Spack 0.23
     remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
     spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
+    # TODO: Remove this in Spack 0.23
     shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
     require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

@@ -344,21 +350,36 @@ def ci_rebuild(args):

     full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

+    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
+    deprecated_mirror_config = False
+    buildcache_destination = None
+    if "buildcache-destination" in pipeline_mirrors:
+        buildcache_destination = pipeline_mirrors["buildcache-destination"]
+    else:
+        deprecated_mirror_config = True
+        # TODO: This will be an error in Spack 0.23
+
     # If no override url exists, then just push binary package to the
     # normal remote mirror url.
+    # TODO: Remove in Spack 0.23
     buildcache_mirror_url = remote_mirror_override or remote_mirror_url
+    if buildcache_destination:
+        buildcache_mirror_url = buildcache_destination.push_url

     # Figure out what is our temporary storage mirror: Is it artifacts
     # buildcache? Or temporary-storage-url-prefix? In some cases we need to
     # force something or pipelines might not have a way to propagate build
     # artifacts from upstream to downstream jobs.
+    # TODO: Remove this in Spack 0.23
     pipeline_mirror_url = None

+    # TODO: Remove this in Spack 0.23
     temp_storage_url_prefix = None
     if "temporary-storage-url-prefix" in ci_config:
         temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
         pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

+    # TODO: Remove this in Spack 0.23
     enable_artifacts_mirror = False
     if "enable-artifacts-buildcache" in ci_config:
         enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]

@@ -454,12 +475,14 @@ def ci_rebuild(args):
     # If we decided there should be a temporary storage mechanism, add that
     # mirror now so it's used when we check for a hash match already
     # built for this spec.
+    # TODO: Remove this block in Spack 0.23
     if pipeline_mirror_url:
         mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
         spack.mirror.add(mirror, cfg.default_modify_scope())
         pipeline_mirrors.append(pipeline_mirror_url)

     # Check configured mirrors for a built spec with a matching hash
+    # TODO: Remove this block in Spack 0.23
     mirrors_to_check = None
     if remote_mirror_override:
         if spack_pipeline_type == "spack_protected_branch":

@@ -477,7 +500,8 @@ def ci_rebuild(args):
             )
             pipeline_mirrors.append(remote_mirror_override)

-    if spack_pipeline_type == "spack_pull_request":
+    # TODO: Remove this in Spack 0.23
+    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
         if shared_pr_mirror_url != "None":
             pipeline_mirrors.append(shared_pr_mirror_url)

@@ -499,6 +523,7 @@ def ci_rebuild(args):
         tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
         for match in matches:
             tty.msg("  {0}".format(match["mirror_url"]))
+        # TODO: Remove this block in Spack 0.23
         if enable_artifacts_mirror:
             matching_mirror = matches[0]["mirror_url"]
             build_cache_dir = os.path.join(local_mirror_dir, "build_cache")

@@ -513,7 +538,8 @@ def ci_rebuild(args):
     # only want to keep the mirror being used by the current pipeline as it's binary
     # package destination. This ensures that the when we rebuild everything, we only
     # consume binary dependencies built in this pipeline.
-    if full_rebuild:
+    # TODO: Remove this in Spack 0.23
+    if deprecated_mirror_config and full_rebuild:
         spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())

     # No hash match anywhere means we need to rebuild spec

@@ -579,7 +605,9 @@ def ci_rebuild(args):
             "SPACK_COLOR=always",
             "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
             "-j$(nproc)",
-            "install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
+            "install-deps/{}".format(
+                ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
+            ),
         ],
         spack_cmd + ["install"] + root_install_args,
     ]

@@ -676,21 +704,25 @@ def ci_rebuild(args):
     # print out some instructions on how to reproduce this build failure
     # outside of the pipeline environment.
     if install_exit_code == 0:
-        if buildcache_mirror_url or pipeline_mirror_url:
-            for result in spack_ci.create_buildcache(
-                input_spec=job_spec,
-                buildcache_mirror_url=buildcache_mirror_url,
-                pipeline_mirror_url=pipeline_mirror_url,
-                sign_binaries=spack_ci.can_sign_binaries(),
-            ):
-                msg = tty.msg if result.success else tty.warn
-                msg(
-                    "{} {} to {}".format(
-                        "Pushed" if result.success else "Failed to push",
-                        job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
-                        result.url,
-                    )
+        mirror_urls = [buildcache_mirror_url]
+
+        # TODO: Remove this block in Spack 0.23
+        if pipeline_mirror_url:
+            mirror_urls.append(pipeline_mirror_url)
+
+        for result in spack_ci.create_buildcache(
+            input_spec=job_spec,
+            destination_mirror_urls=mirror_urls,
+            sign_binaries=spack_ci.can_sign_binaries(),
+        ):
+            msg = tty.msg if result.success else tty.warn
+            msg(
+                "{} {} to {}".format(
+                    "Pushed" if result.success else "Failed to push",
+                    job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
+                    result.url,
                 )
+            )

     # If this is a develop pipeline, check if the spec that we just built is
     # on the broken-specs list. If so, remove it.

@@ -12,7 +12,7 @@

 import spack.cmd
 import spack.config
-import spack.dependency as dep
+import spack.deptypes as dt
 import spack.environment as ev
 import spack.mirror
 import spack.modules

@@ -114,16 +114,13 @@ def __call__(self, parser, namespace, jobs, option_string):


 class DeptypeAction(argparse.Action):
-    """Creates a tuple of valid dependency types from a deptype argument."""
+    """Creates a flag of valid dependency types from a deptype argument."""

     def __call__(self, parser, namespace, values, option_string=None):
-        deptype = dep.all_deptypes
-        if values:
-            deptype = tuple(x.strip() for x in values.split(","))
-            if deptype == ("all",):
-                deptype = "all"
-            deptype = dep.canonical_deptype(deptype)
-
+        if not values or values == "all":
+            deptype = dt.ALL
+        else:
+            deptype = dt.canonicalize(values.split(","))
         setattr(namespace, self.dest, deptype)
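DeptypeAction now yields a single bit-flag rather than a tuple of strings, which makes combining and testing dependency types a bitwise operation. A self-contained sketch of the flag representation; the constants are illustrative, not Spack's actual values:

    BUILD, LINK, RUN, TEST = 1, 2, 4, 8  # illustrative bit values
    ALL = BUILD | LINK | RUN | TEST
    NAMES = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}

    def canonicalize(values):
        # Fold a list like ["build", "link"] into a single integer flag.
        flag = 0
        for v in values:
            flag |= NAMES[v.strip()]
        return flag

    assert canonicalize(["build", "link"]) == BUILD | LINK
    assert canonicalize(["build", "link", "run", "test"]) == ALL
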
@@ -285,9 +282,8 @@ def deptype():
     return Args(
         "--deptype",
         action=DeptypeAction,
-        default=dep.all_deptypes,
-        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
-        % ",".join(dep.all_deptypes),
+        default=dt.ALL,
+        help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
     )


@@ -7,14 +7,15 @@

 import llnl.util.tty as tty

-import spack.build_environment as build_environment
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.deptypes as dt
 import spack.error
 import spack.paths
 import spack.spec
 import spack.store
-from spack import traverse
+from spack import build_environment, traverse
+from spack.context import Context
 from spack.util.environment import dump_environment, pickle_environment


@@ -41,14 +42,14 @@ def setup_parser(subparser):


 class AreDepsInstalledVisitor:
-    def __init__(self, context="build"):
-        if context not in ("build", "test"):
-            raise ValueError("context can only be build or test")
-
-        if context == "build":
-            self.direct_deps = ("build", "link", "run")
+    def __init__(self, context: Context = Context.BUILD):
+        if context == Context.BUILD:
+            # TODO: run deps shouldn't be required for build env.
+            self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
+        elif context == Context.TEST:
+            self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
         else:
-            self.direct_deps = ("build", "test", "link", "run")
+            raise ValueError("context can only be Context.BUILD or Context.TEST")

         self.has_uninstalled_deps = False

@@ -71,11 +72,11 @@ def accept(self, item):
     def neighbors(self, item):
         # Direct deps: follow build & test edges.
         # Transitive deps: follow link / run.
-        deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
-        return item.edge.spec.edges_to_dependencies(deptype=deptypes)
+        depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
+        return item.edge.spec.edges_to_dependencies(depflag=depflag)
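The visitor above filters dependency edges by traversal depth: the root follows build/link/run edges (plus test in a test context), while deeper nodes follow only link/run. A hedged, generic sketch of that depth-dependent filtering over a toy graph; the graph shape and package names are made up:

    from typing import Dict, List, Tuple

    # Toy DAG: node -> list of (child, edge_kind) pairs.
    Graph = Dict[str, List[Tuple[str, str]]]

    def reachable(graph: Graph, root: str, direct_kinds: set, transitive_kinds: set) -> set:
        seen = set()
        stack = [(root, 0)]
        while stack:
            node, depth = stack.pop()
            # Edge kinds to follow depend on how far we are from the root.
            kinds = direct_kinds if depth == 0 else transitive_kinds
            for child, kind in graph.get(node, []):
                if kind in kinds and child not in seen:
                    seen.add(child)
                    stack.append((child, depth + 1))
        return seen

    g = {"pkg": [("cmake", "build"), ("zlib", "link")], "zlib": [("gmake", "build")]}
    # Direct deps follow build+link+run; transitive only link+run, so gmake is excluded.
    assert reachable(g, "pkg", {"build", "link", "run"}, {"link", "run"}) == {"cmake", "zlib"}
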
-def emulate_env_utility(cmd_name, context, args):
+def emulate_env_utility(cmd_name, context: Context, args):
     if not args.spec:
         tty.die("spack %s requires a spec." % cmd_name)

@@ -119,7 +120,7 @@ def emulate_env_utility(cmd_name, context, args):
                 hashes=True,
                 # This shows more than necessary, but we cannot dynamically change deptypes
                 # in Spec.tree(...).
-                deptypes="all" if context == "build" else ("build", "test", "link", "run"),
+                deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"),
             ),
         )

@@ -5,6 +5,7 @@

 import os
+import re
 import sys
 import urllib.parse

 import llnl.util.tty as tty

@@ -62,6 +63,9 @@ class {class_name}({base_class_name}):
     # notify when the package is updated.
     # maintainers("github_user1", "github_user2")

+    # FIXME: Add the SPDX identifier of the project's license below.
+    license("UNKNOWN")
+
     {versions}

     {dependencies}

@@ -822,7 +826,12 @@ def get_versions(args, name):
     if args.url is not None and args.template != "bundle" and valid_url:
         # Find available versions
         try:
-            url_dict = spack.util.web.find_versions_of_archive(args.url)
+            url_dict = spack.url.find_versions_of_archive(args.url)
+            if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
+                url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
+                if url_dict_filtered is None:
+                    exit(0)
+                url_dict = url_dict_filtered
         except UndetectableVersionError:
             # Use fake versions
             tty.warn("Couldn't detect version in: {0}".format(args.url))

@@ -834,11 +843,7 @@ def get_versions(args, name):
         url_dict = {version: args.url}

     version_hashes = spack.stage.get_checksums_for_versions(
-        url_dict,
-        name,
-        first_stage_function=guesser,
-        keep_stage=args.keep_stage,
-        batch=(args.batch or len(url_dict) == 1),
+        url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage
     )

     versions = get_version_lines(version_hashes, url_dict)

@@ -74,7 +74,7 @@ def dependencies(parser, args):
             spec,
             transitive=args.transitive,
             expand_virtuals=args.expand_virtuals,
-            deptype=args.deptype,
+            depflag=args.deptype,
         )

         if spec.name in dependencies:

@@ -8,7 +8,9 @@
 import shutil
 import sys
 import tempfile
+from typing import Optional

+import llnl.string as string
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify

@@ -28,7 +30,6 @@
 import spack.schema.env
 import spack.spec
 import spack.tengine
-import spack.util.string as string
 from spack.util.environment import EnvironmentModifications

 description = "manage virtual environments"

@@ -96,22 +97,16 @@ def env_activate_setup_parser(subparser):

     view_options = subparser.add_mutually_exclusive_group()
     view_options.add_argument(
-        "-v",
         "--with-view",
-        action="store_const",
-        dest="with_view",
-        const=True,
-        default=True,
-        help="update PATH, etc., with associated view",
+        "-v",
+        metavar="name",
+        help="set runtime environment variables for specific view",
     )
     view_options.add_argument(
-        "-V",
         "--without-view",
-        action="store_const",
-        dest="with_view",
-        const=False,
-        default=True,
-        help="do not update PATH, etc., with associated view",
+        "-V",
+        action="store_true",
+        help="do not set runtime environment variables for any view",
     )

     subparser.add_argument(

@@ -197,10 +192,20 @@ def env_activate(args):

     # Activate new environment
     active_env = ev.Environment(env_path)

+    # Check if runtime environment variables are requested, and if so, for what view.
+    view: Optional[str] = None
+    if args.with_view:
+        view = args.with_view
+        if not active_env.has_view(view):
+            tty.die(f"The environment does not have a view named '{view}'")
+    elif not args.without_view and active_env.has_view(ev.default_view_name):
+        view = ev.default_view_name
+
     cmds += spack.environment.shell.activate_header(
-        env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None
+        env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view
     )
-    env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view))
+    env_mods.extend(spack.environment.shell.activate(env=active_env, view=view))
     cmds += env_mods.shell_modifications(args.shell)
     sys.stdout.write(cmds)
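env_activate now resolves an Optional view name: an explicitly requested view must exist, otherwise the default view is used when present, and --without-view yields none. A condensed sketch of that resolution; the view set and default name are illustrative:

    from typing import Optional, Set

    DEFAULT_VIEW = "default"

    def resolve_view(with_view: Optional[str], without_view: bool, views: Set[str]) -> Optional[str]:
        if with_view:
            if with_view not in views:
                raise ValueError(f"no view named '{with_view}'")
            return with_view
        if not without_view and DEFAULT_VIEW in views:
            return DEFAULT_VIEW
        return None  # no runtime environment variables from any view

    assert resolve_view(None, False, {"default"}) == "default"
    assert resolve_view(None, True, {"default"}) is None
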
@@ -5,6 +5,7 @@
 import argparse
+import errno
 import os
 import re
 import sys
 from typing import List, Optional

@@ -156,11 +157,20 @@ def packages_to_search_for(
 ):
     result = []
     for current_tag in tags:
-        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))

     if names:
-        result = [x for x in result if x in names]
+        # Match both fully qualified and unqualified
+        parts = [rf"(^{x}$|[.]{x}$)" for x in names]
+        select_re = re.compile("|".join(parts))
+        result = [x for x in result if select_re.search(x)]

     if exclude:
-        result = [x for x in result if x not in exclude]
+        # Match both fully qualified and unqualified
+        parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
+        select_re = re.compile("|".join(parts))
+        result = [x for x in result if not select_re.search(x)]

     return result
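The regex above accepts both fully qualified names ("builtin.zlib") and bare names ("zlib") by anchoring on either the start of string or a preceding dot. A small sketch of the matching rule; re.escape is added here for safety and is not part of the hunk:

    import re

    def matches_name(candidate: str, names: list) -> bool:
        # "zlib" matches both "zlib" and "builtin.zlib", but not "myzlib".
        parts = [rf"(^{re.escape(n)}$|[.]{re.escape(n)}$)" for n in names]
        return re.search("|".join(parts), candidate) is not None

    assert matches_name("builtin.zlib", ["zlib"])
    assert matches_name("zlib", ["zlib"])
    assert not matches_name("myzlib", ["zlib"])
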
@@ -74,19 +74,19 @@ def graph(parser, args):

     if args.static:
         args.dot = True
-        static_graph_dot(specs, deptype=args.deptype)
+        static_graph_dot(specs, depflag=args.deptype)
         return

     if args.dot:
         builder = SimpleDAG()
         if args.color:
             builder = DAGWithDependencyTypes()
-        graph_dot(specs, builder=builder, deptype=args.deptype)
+        graph_dot(specs, builder=builder, depflag=args.deptype)
         return

     # ascii is default: user doesn't need to provide it explicitly
     debug = spack.config.get("config:debug")
-    graph_ascii(specs[0], debug=debug, deptype=args.deptype)
+    graph_ascii(specs[0], debug=debug, depflag=args.deptype)
     for spec in specs[1:]:
         print()  # extra line bt/w independent graphs
         graph_ascii(spec, debug=debug)

@@ -11,6 +11,7 @@
 from llnl.util.tty.colify import colify

 import spack.cmd.common.arguments as arguments
+import spack.deptypes as dt
 import spack.fetch_strategy as fs
 import spack.install_test
 import spack.repo

@@ -71,6 +72,10 @@ def variant(s):
     return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


+def license(s):
+    return spack.spec.VERSION_COLOR + s + plain_format
+
+
 class VariantFormatter:
     def __init__(self, variants):
         self.variants = variants

@@ -160,7 +165,7 @@ def print_dependencies(pkg):
     for deptype in ("build", "link", "run"):
         color.cprint("")
         color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
-        deps = sorted(pkg.dependencies_of_type(deptype))
+        deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
         if deps:
             colify(deps, indent=4)
         else:

@@ -347,6 +352,22 @@ def print_virtuals(pkg):
         color.cprint("    None")


+def print_licenses(pkg):
+    """Output the licenses of the project."""
+
+    color.cprint("")
+    color.cprint(section_title("Licenses:    "))
+
+    if len(pkg.licenses) == 0:
+        color.cprint("    None")
+    else:
+        pad = padder(pkg.licenses, 4)
+        for when_spec in pkg.licenses:
+            license_identifier = pkg.licenses[when_spec]
+            line = license("    {0}".format(pad(license_identifier))) + color.cescape(when_spec)
+            color.cprint(line)
+
+
 def info(parser, args):
     spec = spack.spec.Spec(args.package)
     pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)

@@ -376,6 +397,7 @@ def info(parser, args):
         (args.all or not args.no_dependencies, print_dependencies),
         (args.all or args.virtuals, print_virtuals),
         (args.all or args.tests, print_tests),
+        (args.all or True, print_licenses),
     ]
     for print_it, func in sections:
         if print_it:

@@ -240,8 +240,7 @@ def default_log_file(spec):
     """Computes the default filename for the log file and creates
     the corresponding directory if not present
     """
-    fmt = "test-{x.name}-{x.version}-{hash}.xml"
-    basename = fmt.format(x=spec, hash=spec.dag_hash())
+    basename = spec.format_path("test-{name}-{version}-{hash}.xml")
     dirname = fs.os.path.join(spack.paths.reports_path, "junit")
     fs.mkdirp(dirname)
     return fs.os.path.join(dirname, basename)

@@ -16,7 +16,7 @@
 from llnl.util.tty.colify import colify

 import spack.cmd.common.arguments as arguments
-import spack.dependency
+import spack.deptypes as dt
 import spack.repo
 from spack.version import VersionList

@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):

 def get_dependencies(pkg):
     all_deps = {}
-    for deptype in spack.dependency.all_deptypes:
-        deps = pkg.dependencies_of_type(deptype)
+    for deptype in dt.ALL_TYPES:
+        deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
         all_deps[deptype] = [d for d in deps]

     return all_deps

@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
     out.write("\n")
     out.write("</dd>\n")

-    for deptype in spack.dependency.all_deptypes:
-        deps = pkg_cls.dependencies_of_type(deptype)
+    for deptype in dt.ALL_TYPES:
+        deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
         if deps:
             out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
             out.write("<dd>\n")

@@ -5,6 +5,8 @@

 import sys

+import llnl.util.tty as tty
+
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.cmd.find

@@ -108,16 +110,14 @@ def load(parser, args):
         )
         return 1

-    with spack.store.STORE.db.read_transaction():
-        if "dependencies" in args.things_to_load:
-            include_roots = "package" in args.things_to_load
-            specs = [
-                dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
-            ]
+    if args.things_to_load != "package,dependencies":
+        tty.warn(
+            "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
+        )

-        env_mod = spack.util.environment.EnvironmentModifications()
+    with spack.store.STORE.db.read_transaction():
+        env_mod = uenv.environment_modifications_for_specs(*specs)
         for spec in specs:
-            env_mod.extend(uenv.environment_modifications_for_spec(spec))
             env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
         cmds = env_mod.shell_modifications(args.shell)

@@ -6,10 +6,11 @@
 import posixpath
 import sys

+from llnl.path import convert_to_posix_path
+
 import spack.paths
 import spack.util.executable
 from spack.spec import Spec
-from spack.util.path import convert_to_posix_path

 description = "generate Windows installer"
 section = "admin"

@@ -176,17 +176,29 @@ def solve(parser, args):
     output = sys.stdout if "asp" in show else None
     setup_only = set(show) == {"asp"}
     unify = spack.config.get("concretizer:unify")
+    allow_deprecated = spack.config.get("config:deprecated", False)
     if unify != "when_possible":
         # set up solver parameters
         # Note: reuse and other concretizer prefs are passed as configuration
         result = solver.solve(
-            specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
+            specs,
+            out=output,
+            timers=args.timers,
+            stats=args.stats,
+            setup_only=setup_only,
+            allow_deprecated=allow_deprecated,
         )
         if not setup_only:
             _process_result(result, show, required_format, kwargs)
     else:
         for idx, result in enumerate(
-            solver.solve_in_rounds(specs, out=output, timers=args.timers, stats=args.stats)
+            solver.solve_in_rounds(
+                specs,
+                out=output,
+                timers=args.timers,
+                stats=args.stats,
+                allow_deprecated=allow_deprecated,
+            )
         ):
             if "solutions" in show:
                 tty.msg("ROUND {0}".format(idx))

@@ -5,6 +5,7 @@
 import io
 import sys

+import llnl.string
 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify

@@ -24,7 +25,7 @@ def report_tags(category, tags):
     if isatty:
         num = len(tags)
         fmt = "{0} package tag".format(category)
-        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
+        buffer.write("{0}:\n".format(llnl.string.plural(num, fmt)))

     if tags:
         colify.colify(tags, output=buffer, tty=isatty, indent=4)

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.cmd.common.env_utility as env_utility
+from spack.context import Context

 description = (
     "run a command in a spec's test environment, or dump its environment to screen or file"

@@ -14,4 +15,4 @@


 def test_env(parser, args):
-    env_utility.emulate_env_utility("test-env", "test", args)
+    env_utility.emulate_env_utility("test-env", Context.TEST, args)

@@ -88,9 +88,8 @@ def unload(parser, args):
         )
         return 1

-    env_mod = spack.util.environment.EnvironmentModifications()
+    env_mod = uenv.environment_modifications_for_specs(*specs).reversed()
     for spec in specs:
-        env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
         env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
     cmds = env_mod.shell_modifications(args.shell)
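spack unload now builds the environment modifications for all specs at once and reverses them, so that, for example, a prepend performed at load time becomes a removal. A toy sketch of reversible modifications; the API shape here is illustrative, not Spack's EnvironmentModifications:

    class EnvMods:
        def __init__(self):
            self.ops = []  # (op, var, value) tuples applied in order

        def prepend_path(self, var, value):
            self.ops.append(("prepend", var, value))
            return self

        def reversed(self):
            rev = EnvMods()
            # Undo in opposite order: a prepend becomes a removal.
            for op, var, value in reversed(self.ops):
                rev.ops.append(("remove" if op == "prepend" else op, var, value))
            return rev

    mods = EnvMods().prepend_path("PATH", "/opt/pkg/bin")
    assert mods.reversed().ops == [("remove", "PATH", "/opt/pkg/bin")]
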
@@ -12,6 +12,7 @@
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.url
|
||||
import spack.util.crypto as crypto
|
||||
from spack.url import (
|
||||
UndetectableNameError,
|
||||
@@ -26,7 +27,6 @@
|
||||
substitution_offsets,
|
||||
)
|
||||
from spack.util.naming import simplify_name
|
||||
from spack.util.web import find_versions_of_archive
|
||||
|
||||
description = "debugging tool for url parsing"
|
||||
section = "developer"
|
||||
@@ -139,7 +139,7 @@ def url_parse(args):
|
||||
if args.spider:
|
||||
print()
|
||||
tty.msg("Spidering for versions:")
|
||||
versions = find_versions_of_archive(url)
|
||||
versions = spack.url.find_versions_of_archive(url)
|
||||
|
||||
if not versions:
|
||||
print(" Found no versions for {0}".format(name))
|
||||
|
||||
@@ -37,10 +37,7 @@ def setup_parser(subparser):
|
||||
action="store_true",
|
||||
help="only list remote versions newer than the latest checksummed version",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["package"])
|
||||
arguments.add_common_arguments(subparser, ["package", "jobs"])
|
||||
|
||||
|
||||
def versions(parser, args):
|
||||
@@ -68,7 +65,7 @@ def versions(parser, args):
|
||||
if args.safe:
|
||||
return
|
||||
|
||||
fetched_versions = pkg.fetch_remote_versions(args.concurrency)
|
||||
fetched_versions = pkg.fetch_remote_versions(args.jobs)
|
||||
|
||||
if args.new:
|
||||
if sys.stdout.isatty():
|
||||
|
||||
@@ -13,6 +13,7 @@
import tempfile
from typing import List, Optional, Sequence

import llnl.path
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
@@ -24,7 +25,6 @@
import spack.util.module_cmd
import spack.version
from spack.util.environment import filter_system_paths
from spack.util.path import system_path_filter

__all__ = ["Compiler"]

@@ -39,10 +39,17 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
        version_arg (str): the argument used to extract version information
    """
    compiler = spack.util.executable.Executable(compiler_path)
    compiler_invocation_args = {
        "output": str,
        "error": str,
        "ignore_errors": ignore_errors,
        "timeout": 120,
        "fail_on_error": True,
    }
    if version_arg:
        output = compiler(version_arg, output=str, error=str, ignore_errors=ignore_errors)
        output = compiler(version_arg, **compiler_invocation_args)
    else:
        output = compiler(output=str, error=str, ignore_errors=ignore_errors)
        output = compiler(**compiler_invocation_args)
    return output
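
The hunk above gathers the invocation keyword arguments into a single dict, so new settings such as the two-minute timeout and fail_on_error apply to both call sites at once. A minimal runnable sketch of the same pattern using the standard library; tool_version_output is illustrative, not Spack API:

    import subprocess

    def tool_version_output(tool_path: str, version_arg: str = "") -> str:
        """Run the tool with an optional version flag and return its output."""
        # Shared invocation settings, mirroring compiler_invocation_args above:
        # a new option only has to be added in one place.
        invocation_args = {
            "capture_output": True,
            "text": True,
            "timeout": 120,  # give up on hung tools after two minutes
            "check": True,   # raise on non-zero exit, like fail_on_error=True
        }
        argv = [tool_path, version_arg] if version_arg else [tool_path]
        result = subprocess.run(argv, **invocation_args)
        return result.stdout + result.stderr

    print(tool_version_output("gcc", "--version"))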

@@ -153,7 +160,7 @@ def _parse_link_paths(string):
    return implicit_link_dirs


@system_path_filter
@llnl.path.system_path_filter
def _parse_non_system_link_dirs(string: str) -> List[str]:
    """Parses link paths out of compiler debug output.

@@ -229,6 +236,9 @@ class Compiler:
    # by any compiler
    _all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]

    #: Platform matcher for Platform objects supported by compiler
    is_supported_on_platform = lambda x: True

    # Default flags used by a compiler to set an rpath
    @property
    def cc_rpath_arg(self):
@@ -594,8 +604,6 @@ def search_regexps(cls, language):
    compiler_names = getattr(cls, "{0}_names".format(language))
    prefixes = [""] + cls.prefixes
    suffixes = [""]
    # Windows compilers generally have an extension of some sort
    # as do most files on Windows, handle that case here
    if sys.platform == "win32":
        ext = r"\.(?:exe|bat)"
        cls_suf = [suf + ext for suf in cls.suffixes]

@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
from typing import Dict
from typing import Dict, List

import archspec.cpu

@@ -298,7 +298,7 @@ def select_new_compilers(compilers, scope=None):
    return compilers_not_in_config


def supported_compilers():
def supported_compilers() -> List[str]:
    """Return a set of names of compilers supported by Spack.

    See available_compilers() to get a list of all the available
@@ -306,10 +306,41 @@ def supported_compilers():
    """
    # Hack to be able to call the compiler `apple-clang` while still
    # using a valid python name for the module
    return sorted(
        name if name != "apple_clang" else "apple-clang"
        for name in llnl.util.lang.list_modules(spack.paths.compilers_path)
    )
    return sorted(all_compiler_names())


def supported_compilers_for_host_platform() -> List[str]:
    """Return a set of compiler class objects supported by Spack
    that are also supported by the current host platform
    """
    host_plat = spack.platforms.real_host()
    return supported_compilers_for_platform(host_plat)


def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
    """Return a set of compiler class objects supported by Spack
    that are also supported by the provided platform

    Args:
        platform (str): string representation of platform
            for which compiler compatibility should be determined
    """
    return [
        name
        for name in supported_compilers()
        if class_for_compiler_name(name).is_supported_on_platform(platform)
    ]


def all_compiler_names() -> List[str]:
    def replace_apple_clang(name):
        return name if name != "apple_clang" else "apple-clang"

    return [replace_apple_clang(name) for name in all_compiler_module_names()]


def all_compiler_module_names() -> List[str]:
    return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
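
The refactor above splits module listing (all_compiler_module_names), renaming (all_compiler_names), and platform filtering (supported_compilers_for_platform) into separate helpers. A minimal runnable sketch of the filtering pattern, with illustrative classes standing in for Spack's compiler and platform objects:

    class LinuxPlatform:
        name = "linux"

    class Gcc:
        # Mirrors the is_supported_on_platform hook on Compiler subclasses.
        is_supported_on_platform = staticmethod(lambda platform: True)

    class Msvc:
        is_supported_on_platform = staticmethod(lambda platform: platform.name == "windows")

    def supported_for(platform, candidates):
        # Keep only the candidates whose predicate accepts this platform.
        return [cls.__name__.lower() for cls in candidates
                if cls.is_supported_on_platform(platform)]

    print(supported_for(LinuxPlatform(), [Gcc, Msvc]))  # ['gcc']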


@_auto_compiler_spec
@@ -628,7 +659,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
    def _default(search_paths):
        command_arguments = []
        files_to_be_tested = fs.files_in(*search_paths)
        for compiler_name in spack.compilers.supported_compilers():
        for compiler_name in spack.compilers.supported_compilers_for_host_platform():
            compiler_cls = class_for_compiler_name(compiler_name)

            for language in ("cc", "cxx", "f77", "fc"):
@@ -687,9 +718,11 @@ def _default(fn_args):
            value = fn_args._replace(id=compiler_id._replace(version=version))
            return value, None

        error = "Couldn't get version for compiler {0}".format(path)
        error = f"Couldn't get version for compiler {path}"
    except spack.util.executable.ProcessError as e:
        error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
        error = f"Couldn't get version for compiler {path}\n" + str(e)
    except spack.util.executable.ProcessTimeoutError as e:
        error = f"Couldn't get version for compiler {path}\n" + str(e)
    except Exception as e:
        # Catching "Exception" here is fine because it just
        # means something went wrong running a candidate executable.

@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
        match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
        if match:
            return ".".join(match.groups())
        return "unknown"

    @classmethod
    def fc_version(cls, fortran_compiler):

@@ -7,7 +7,6 @@
import re
import subprocess
import sys
from distutils.version import StrictVersion
from typing import Dict, List, Set

import spack.compiler
@@ -115,11 +114,11 @@ def command_str(self):

def get_valid_fortran_pth(comp_ver):
    cl_ver = str(comp_ver)
    sort_fn = lambda fc_ver: StrictVersion(fc_ver)
    sort_fn = lambda fc_ver: Version(fc_ver)
    sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
    for ver in sort_fc_ver:
        if ver in fortran_mapping:
            if StrictVersion(cl_ver) <= StrictVersion(fortran_mapping[ver]):
            if Version(cl_ver) <= Version(fortran_mapping[ver]):
                return fc_path[ver]
    return None

@@ -154,9 +153,12 @@ class Msvc(Compiler):

    #: Regex used to extract version from compiler's output
    version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
    # The MSVC compiler class overrides this to prevent instances
    # of erroneous matching on executable names that cannot be msvc
    # compilers
    suffixes = []

    # Initialize, deferring to base class but then adding the vcvarsallfile
    # file based on compiler executable path.
    is_supported_on_platform = lambda x: isinstance(x, spack.platforms.Windows)

    def __init__(self, *args, **kwargs):
        # This positional argument "paths" is later parsed and processed by the base class
@@ -167,6 +169,8 @@ def __init__(self, *args, **kwargs):
        cspec = args[0]
        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
        paths[:] = new_pth
        # Initialize, deferring to base class but then adding the vcvarsallfile
        # file based on compiler executable path.
        super().__init__(*args, **kwargs)
        # To use the MSVC compilers, VCVARS must be invoked
        # VCVARS is located at a fixed location, referencable

@@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec):
        ),
    )

    def choose_virtual_or_external(self, spec):
    def choose_virtual_or_external(self, spec: spack.spec.Spec):
        """Given a list of candidate virtual and external packages, try to
        find one that is most ABI compatible.
        """
@@ -744,8 +744,11 @@ def concretize_specs_together(*abstract_specs, **kwargs):
def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp

    allow_deprecated = spack.config.get("config:deprecated", False)
    solver = spack.solver.asp.Solver()
    result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
    result = solver.solve(
        abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
    )
    result.raise_if_unsat()
    return [s.copy() for s in result.specs]


@@ -272,13 +272,6 @@ def _os_pkg_manager(self):
            raise spack.error.SpackError(msg)
        return os_pkg_manager

    @tengine.context_property
    def extra_instructions(self):
        Extras = namedtuple("Extra", ["build", "final"])
        extras = self.container_config.get("extra_instructions", {})
        build, final = extras.get("build", None), extras.get("final", None)
        return Extras(build=build, final=final)

    @tengine.context_property
    def labels(self):
        return self.container_config.get("labels", {})

lib/spack/spack/context.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module provides classes used in user and build environment"""

from enum import Enum


class Context(Enum):
    """Enum used to indicate the context in which an environment has to be setup: build,
    run or test."""

    BUILD = 1
    RUN = 2
    TEST = 3

    def __str__(self):
        return ("build", "run", "test")[self.value - 1]

    @classmethod
    def from_string(cls, s: str):
        if s == "build":
            return Context.BUILD
        elif s == "run":
            return Context.RUN
        elif s == "test":
            return Context.TEST
        raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}")
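
A short usage sketch of the new enum, assuming spack.context is importable:

    from spack.context import Context

    ctx = Context.from_string("test")
    assert ctx is Context.TEST
    assert str(ctx) == "test"  # __str__ maps each member back to its string form
    print([str(c) for c in Context])  # ['build', 'run', 'test']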

@@ -4,6 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import json
import os
import traceback
import warnings

import jsonschema
import jsonschema.exceptions
@@ -11,6 +14,7 @@
import llnl.util.tty as tty

import spack.cmd
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
@@ -45,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
    )


def compiler_from_entry(entry):
def compiler_from_entry(entry: dict, manifest_path: str):
    # Note that manifest_path is only passed here to compose a
    # useful warning message when paths appear to be missing.
    compiler_name = translated_compiler_name(entry["name"])
    paths = entry["executables"]

    if "prefix" in entry:
        prefix = entry["prefix"]
        paths = dict(
            (lang, os.path.join(prefix, relpath))
            for (lang, relpath) in entry["executables"].items()
        )
    else:
        paths = entry["executables"]

    # Do a check for missing paths. Note that this isn't possible for
    # all compiler entries, since their "paths" might actually be
    # exe names like "cc" that depend on modules being loaded. Cray
    # manifest entries are always paths though.
    missing_paths = []
    for path in paths.values():
        if not os.path.exists(path):
            missing_paths.append(path)

    # to instantiate a compiler class we may need a concrete version:
    version = "={}".format(entry["version"])
    arch = entry["arch"]
@@ -56,8 +80,18 @@ def compiler_from_entry(entry):

    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    spec = spack.spec.CompilerSpec(compiler_cls.name, version)
    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
    return compiler_cls(spec, operating_system, target, paths)
    path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]

    if missing_paths:
        warnings.warn(
            "Manifest entry refers to nonexistent paths:\n\t"
            + "\n\t".join(missing_paths)
            + f"\nfor {str(spec)}"
            + f"\nin {manifest_path}"
            + "\nPlease report this issue"
        )

    return compiler_cls(spec, operating_system, target, path_list)


def spec_from_entry(entry):
@@ -158,13 +192,13 @@ def entries_to_specs(entries):
        dependencies = entry["dependencies"]
        for name, properties in dependencies.items():
            dep_hash = properties["hash"]
            deptypes = properties["type"]
            depflag = dt.canonicalize(properties["type"])
            if dep_hash in spec_dict:
                if entry["hash"] not in spec_dict:
                    continue
                parent_spec = spec_dict[entry["hash"]]
                dep_spec = spec_dict[dep_hash]
                parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
                parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())

    for spec in spec_dict.values():
        spack.spec.reconstruct_virtuals_on_edges(spec)
@@ -186,12 +220,21 @@ def read(path, apply_updates):
    tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
    compilers = list()
    if "compilers" in json_data:
        compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
        compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
    tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
    # Filter out the compilers that already appear in the configuration
    compilers = spack.compilers.select_new_compilers(compilers)
    if apply_updates and compilers:
        spack.compilers.add_compilers_to_config(compilers, init_config=False)
        for compiler in compilers:
            try:
                spack.compilers.add_compilers_to_config([compiler], init_config=False)
            except Exception:
                warnings.warn(
                    f"Could not add compiler {str(compiler.spec)}: "
                    f"\n\tfrom manifest: {path}"
                    "\nPlease reexecute with 'spack -d' and include the stack trace"
                )
                tty.debug(f"Include this\n{traceback.format_exc()}")
    if apply_updates:
        for spec in specs.values():
            spack.store.STORE.db.add(spec, directory_layout=None)

@@ -27,6 +27,8 @@
import time
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

import spack.deptypes as dt

try:
    import uuid

@@ -89,7 +91,7 @@

#: Types of dependencies tracked by the database
#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
_TRACKED_DEPENDENCIES = ht.dag_hash.depflag

#: Default list of fields written for each install record
DEFAULT_INSTALL_RECORD_FIELDS = (
@@ -795,7 +797,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                tty.warn(msg)
                continue

            spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
            spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)

    def _read_from_file(self, filename):
        """Fill database from file, do not maintain old data.
@@ -1146,7 +1148,7 @@ def _add(
        # Retrieve optional arguments
        installation_time = installation_time or _now()

        for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
        for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
            if edge.spec.dag_hash() in self._data:
                continue
            # allow missing build-only deps. This prevents excessive
@@ -1154,7 +1156,7 @@ def _add(
            # is missing a build dep; there's no need to install the
            # build dep's build dep first, and there's no need to warn
            # about it missing.
            dep_allow_missing = allow_missing or edge.deptypes == ("build",)
            dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
            self._add(
                edge.spec,
                directory_layout,
@@ -1198,10 +1200,10 @@ def _add(
            self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)

            # Connect dependencies from the DB to the new copy.
            for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
            for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
                dkey = dep.spec.dag_hash()
                upstream, record = self.query_by_spec_hash(dkey)
                new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
                new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
                if not upstream:
                    record.ref_count += 1

@@ -1371,7 +1373,13 @@ def deprecate(self, spec, deprecator):
        return self._deprecate(spec, deprecator)

    @_autospec
    def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
    def installed_relatives(
        self,
        spec,
        direction="children",
        transitive=True,
        deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
    ):
        """Return installed specs related to this one."""
        if direction not in ("parents", "children"):
            raise ValueError("Invalid direction: %s" % direction)

@@ -3,64 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
from typing import Dict, List, Optional, Set, Tuple, Union
from typing import Dict, List

import spack.deptypes as dt
import spack.spec

#: The types of dependency relationships that Spack understands.
all_deptypes = ("build", "link", "run", "test")

#: Default dependency type if none is specified
default_deptype = ("build", "link")

#: Type hint for the arguments accepting a dependency type
DependencyArgument = Union[str, List[str], Tuple[str, ...]]


def deptype_chars(*type_tuples: str) -> str:
    """Create a string representing deptypes for many dependencies.

    The string will be some subset of 'blrt', like 'bl ', 'b t', or
    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
    'run', and 'test' (the dependency types).

    For a single dependency, this just indicates that the dependency has
    the indicated deptypes. For a list of dependencies, this shows
    whether ANY dependency in the list has the deptypes (so the deptypes
    are merged).
    """
    types: Set[str] = set()
    for t in type_tuples:
        if t:
            types.update(t)

    return "".join(t[0] if t in types else " " for t in all_deptypes)


def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
    """Convert deptype to a canonical sorted tuple, or raise ValueError.

    Args:
        deptype: string representing dependency type, or a list/tuple of such strings.
            Can also be the builtin function ``all`` or the string 'all', which result in
            a tuple of all dependency types known to Spack.
    """
    if deptype in ("all", all):
        return all_deptypes

    elif isinstance(deptype, str):
        if deptype not in all_deptypes:
            raise ValueError("Invalid dependency type: %s" % deptype)
        return (deptype,)

    elif isinstance(deptype, (tuple, list, set)):
        bad = [d for d in deptype if d not in all_deptypes]
        if bad:
            raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
        return tuple(sorted(set(deptype)))

    raise ValueError("Invalid dependency type: %s" % repr(deptype))


class Dependency:
    """Class representing metadata for a dependency on a package.
@@ -93,7 +40,7 @@ def __init__(
        self,
        pkg: "spack.package_base.PackageBase",
        spec: "spack.spec.Spec",
        type: Optional[Tuple[str, ...]] = default_deptype,
        depflag: dt.DepFlag = dt.DEFAULT,
    ):
        """Create a new Dependency.

@@ -110,11 +57,7 @@ def __init__(
        # This dict maps condition specs to lists of Patch objects, just
        # as the patches dict on packages does.
        self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}

        if type is None:
            self.type = set(default_deptype)
        else:
            self.type = set(type)
        self.depflag = depflag

    @property
    def name(self) -> str:
@@ -124,7 +67,7 @@ def name(self) -> str:
    def merge(self, other: "Dependency"):
        """Merge constraints, deptypes, and patches of other into self."""
        self.spec.constrain(other.spec)
        self.type |= other.type
        self.depflag |= other.depflag

        # concatenate patch lists, or just copy them in
        for cond, p in other.patches.items():
@@ -135,5 +78,5 @@ def merge(self, other: "Dependency"):
            self.patches[cond] = other.patches[cond]

    def __repr__(self) -> str:
        types = deptype_chars(*self.type)
        types = dt.flag_to_chars(self.depflag)
        return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"

lib/spack/spack/deptypes.py (new file, 123 lines)
@@ -0,0 +1,123 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's edge types."""

from typing import Iterable, List, Tuple, Union

#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int

#: Type hint for the high-level dependency input
DepTypes = Union[str, List[str], Tuple[str, ...]]

#: Individual dependency types
DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]

# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
# and we rely on default integer comparison to sort dependency types.
# New dependency types should be appended.
LINK = 0b0001
RUN = 0b0010
BUILD = 0b0100
TEST = 0b1000

#: The types of dependency relationships that Spack understands.
ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")

#: Default dependency type if none is specified
DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")

#: A flag with all dependency types set
ALL: DepFlag = BUILD | LINK | RUN | TEST

#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK

#: A tuple of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)


def flag_from_string(s: str) -> DepFlag:
    if s == "build":
        return BUILD
    elif s == "link":
        return LINK
    elif s == "run":
        return RUN
    elif s == "test":
        return TEST
    else:
        raise ValueError(f"Invalid dependency type: {s}")


def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
    """Transform an iterable of deptype strings into a flag."""
    flag = 0
    for deptype_str in deptype:
        flag |= flag_from_string(deptype_str)
    return flag


def canonicalize(deptype: DepTypes) -> DepFlag:
    """Convert deptype user input to a DepFlag, or raise ValueError.

    Args:
        deptype: string representing dependency type, or a list/tuple of such strings.
            Can also be the builtin function ``all`` or the string 'all', which result in
            a flag with all dependency types set.
    """
    if deptype in ("all", all):
        return ALL

    if isinstance(deptype, str):
        return flag_from_string(deptype)

    if isinstance(deptype, (tuple, list, set)):
        return flag_from_strings(deptype)

    raise ValueError(f"Invalid dependency type: {deptype!r}")


def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
    deptype: List[DepType] = []
    if x & BUILD:
        deptype.append("build")
    if x & LINK:
        deptype.append("link")
    if x & RUN:
        deptype.append("run")
    if x & TEST:
        deptype.append("test")
    return tuple(deptype)


def flag_to_string(x: DepFlag) -> DepType:
    if x == BUILD:
        return "build"
    elif x == LINK:
        return "link"
    elif x == RUN:
        return "run"
    elif x == TEST:
        return "test"
    else:
        raise ValueError(f"Invalid dependency type flag: {x}")


def flag_to_chars(depflag: DepFlag) -> str:
    """Create a string representing deptypes for many dependencies.

    The string will be some subset of 'blrt', like 'bl ', 'b t', or
    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
    'run', and 'test' (the dependency types).

    For a single dependency, this just indicates that the dependency has
    the indicated deptypes. For a list of dependencies, this shows
    whether ANY dependency in the list has the deptypes (so the deptypes
    are merged)."""
    return "".join(
        t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
    )
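
Because the flags are plain ints, dependency-type sets combine and intersect with single bitwise operations. A short sketch, assuming spack.deptypes is importable:

    import spack.deptypes as dt

    depflag = dt.canonicalize(("build", "run"))
    assert depflag == dt.BUILD | dt.RUN
    assert dt.flag_to_tuple(depflag) == ("build", "run")
    assert dt.flag_to_chars(depflag) == "b r "  # build and run set, link/test blank
    assert depflag & dt.LINK == 0  # membership tests are single AND operations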

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_path, executables_in_path
from .test import detection_tests

__all__ = [
    "DetectedPackage",
@@ -11,4 +12,5 @@
    "executables_in_path",
    "executable_prefix",
    "update_configuration",
    "detection_tests",
]

@@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:


class WindowsKitExternalPaths:
    plat_major_ver = None
    if sys.platform == "win32":
        plat_major_ver = str(winOs.windows_version()[0])

    @staticmethod
    def find_windows_kit_roots() -> Optional[str]:
    def find_windows_kit_roots() -> List[str]:
        """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
        if sys.platform != "win32":
            return None
            return []
        program_files = os.environ["PROGRAMFILES(x86)"]
        kit_base = os.path.join(
            program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
        )
        return kit_base
        kit_base = os.path.join(program_files, "Windows Kits", "**")
        return glob.glob(kit_base)

    @staticmethod
    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit bin directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_bin = os.path.join(kit_base, "bin")
        return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_bin = os.path.join(kit, "bin")
            kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
        """Returns Windows kit lib directory per version"""
        kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
        assert kit_base is not None, "unexpected value for kit_base"
        kit_lib = os.path.join(kit_base, "Lib")
        return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
        assert kit_base, "Unexpectedly empty value for Windows kit base path"
        kit_paths = []
        for kit in kit_base:
            kit_lib = os.path.join(kit, "Lib")
            kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
        return kit_paths

    @staticmethod
    def find_windows_driver_development_kit_paths() -> List[str]:
@@ -347,23 +347,30 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
        )
        kit_root_reg = re.compile(r"KitsRoot[0-9]+")
        root_paths = []
        for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
            root_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
            )
        return root_paths

    @staticmethod
    def find_windows_kit_reg_sdk_paths() -> List[str]:
        reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
            % WindowsKitExternalPaths.plat_major_ver,
        sdk_paths = []
        sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
        windows_reg = spack.util.windows_registry.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
            root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
        )
        if not reg:
            # couldn't find key, return empty list
            return []
        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
            reg.get_value("InstallationFolder").value
        )
        for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
            reg = windows_reg.get_subkey(key)
            sdk_paths.extend(
                WindowsKitExternalPaths.find_windows_kit_lib_paths(
                    reg.get_value("InstallationFolder").value
                )
            )
        return sdk_paths


def find_win32_additional_install_paths() -> List[str]:

@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Detection of software installed in the system based on paths inspections
"""Detection of software installed in the system, based on paths inspections
and running executables.
"""
import collections
@@ -322,12 +322,14 @@ def by_path(
    path_hints: Optional[List[str]] = None,
    max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
    """Return the list of packages that have been detected on the system,
    searching by path.
    """Return the list of packages that have been detected on the system, keyed by
    unqualified package name.

    Args:
        packages_to_search: list of package classes to be detected
        packages_to_search: list of packages to be detected. Each package can be either
            unqualified or fully qualified
        path_hints: initial list of paths to be searched
        max_workers: maximum number of workers to search for packages in parallel
    """
    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
@@ -355,7 +357,8 @@ def by_path(
        try:
            detected = future.result(timeout=DETECTION_TIMEOUT)
            if detected:
                result[pkg_name].extend(detected)
                _, unqualified_name = spack.repo.partition_package_name(pkg_name)
                result[unqualified_name].extend(detected)
        except Exception:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"

lib/spack/spack/detection/test.py (new file, 187 lines)
@@ -0,0 +1,187 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Create and run mock e2e tests for package detection."""
import collections
import contextlib
import pathlib
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

import jinja2

from llnl.util import filesystem

import spack.repo
import spack.spec
from spack.util import spack_yaml

from .path import by_path


class MockExecutables(NamedTuple):
    """Mock executables to be used in detection tests"""

    #: Relative paths for mock executables to be created
    executables: List[str]
    #: Shell script for the mock executable
    script: str


class ExpectedTestResult(NamedTuple):
    """Data structure to model assertions on detection tests"""

    #: Spec to be detected
    spec: str


class DetectionTest(NamedTuple):
    """Data structure to construct detection tests by PATH inspection.

    Packages may have a YAML file containing the description of one or more detection tests
    to be performed. Each test creates a few mock executable scripts in a temporary folder,
    and checks that detection by PATH gives the expected results.
    """

    pkg_name: str
    layout: List[MockExecutables]
    results: List[ExpectedTestResult]


class Runner:
    """Runs an external detection test"""

    def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
        self.test = test
        self.repository = repository
        self.tmpdir = tempfile.TemporaryDirectory()

    def execute(self) -> List[spack.spec.Spec]:
        """Executes a test and returns the specs that have been detected.

        This function sets up a test in a temporary directory, according to the prescriptions
        in the test layout, then performs a detection by executables and returns the specs that
        have been detected.
        """
        with self._mock_layout() as path_hints:
            entries = by_path([self.test.pkg_name], path_hints=path_hints)
            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
            specs = set(x.spec for x in entries[unqualified_name])
        return list(specs)

    @contextlib.contextmanager
    def _mock_layout(self) -> Generator[List[str], None, None]:
        hints = set()
        try:
            for entry in self.test.layout:
                exes = self._create_executable_scripts(entry)

                for mock_executable in exes:
                    hints.add(str(mock_executable.parent))

            yield list(hints)
        finally:
            self.tmpdir.cleanup()

    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
        relative_paths = mock_executables.executables
        script = mock_executables.script
        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
        result = []
        for mock_exe_path in relative_paths:
            rel_path = pathlib.Path(mock_exe_path)
            abs_path = pathlib.Path(self.tmpdir.name) / rel_path
            abs_path.parent.mkdir(parents=True, exist_ok=True)
            abs_path.write_text(script_template.render(script=script))
            filesystem.set_executable(abs_path)
            result.append(abs_path)
        return result

    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
        return [spack.spec.Spec(r.spec) for r in self.test.results]


def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
    """Returns a list of test runners for a given package.

    Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
    alongside the ``package.py`` file.

    This function reads that file to create a bunch of ``Runner`` objects.

    Args:
        pkg_name: name of the package to test
        repository: repository where the package lives
    """
    result = []
    detection_tests_content = read_detection_tests(pkg_name, repository)

    tests_by_path = detection_tests_content.get("paths", [])
    for single_test_data in tests_by_path:
        mock_executables = []
        for layout in single_test_data["layout"]:
            mock_executables.append(
                MockExecutables(executables=layout["executables"], script=layout["script"])
            )
        expected_results = []
        for assertion in single_test_data["results"]:
            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))

        current_test = DetectionTest(
            pkg_name=pkg_name, layout=mock_executables, results=expected_results
        )
        result.append(Runner(test=current_test, repository=repository))

    return result


def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
    """Returns the normalized content of the detection_tests.yaml associated with the package
    passed in input.

    The content is merged with that of any package that is transitively included using the
    "includes" attribute.

    Args:
        pkg_name: name of the package to test
        repository: repository in which to search for packages
    """
    content_stack, seen = [], set()
    included_packages: Deque[str] = collections.deque()

    root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
    included_packages.extend(result.get("includes", []))
    seen |= set(result.get("includes", []))

    while included_packages:
        current_package = included_packages.popleft()
        try:
            current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
        except FileNotFoundError as e:
            msg = (
                f"cannot read the detection tests from the '{current_package}' package, "
                f"included by {root_detection_yaml}"
            )
            raise FileNotFoundError(msg + f"\n\n\t{e}\n")

        content_stack.append((current_package, content))
        included_packages.extend(x for x in content.get("includes", []) if x not in seen)
        seen |= set(content.get("includes", []))

    result.setdefault("paths", [])
    for pkg_name, content in content_stack:
        result["paths"].extend(content.get("paths", []))

    return result


def _detection_tests_yaml(
    pkg_name: str, repository: spack.repo.RepoPath
) -> Tuple[pathlib.Path, Dict[str, Any]]:
    pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
    detection_tests_yaml = pkg_dir / "detection_test.yaml"
    with open(str(detection_tests_yaml)) as f:
        content = spack_yaml.load(f)
    return detection_tests_yaml, content
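
A hedged sketch of driving the new runner by hand; it assumes spack is importable, that the package ships a detection_test.yaml, and that the repository handle is spelled spack.repo.PATH at this revision:

    import spack.repo
    from spack.detection import detection_tests

    for runner in detection_tests("gcc", spack.repo.PATH):
        detected = runner.execute()  # builds mock executables, detects by PATH
        assert set(detected) == set(runner.expected_specs)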

@@ -38,13 +38,14 @@ class OpenMpi(Package):
import llnl.util.lang
import llnl.util.tty.color

import spack.deptypes as dt
import spack.error
import spack.patch
import spack.spec
import spack.url
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.dependency import Dependency
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -63,6 +64,7 @@ class OpenMpi(Package):
    "depends_on",
    "extends",
    "maintainers",
    "license",
    "provides",
    "patch",
    "variant",
@@ -436,7 +438,7 @@ def _execute_version(pkg, ver, **kwargs):
        pkg.versions[version] = kwargs


def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
    when_spec = make_when_spec(when)
    if not when_spec:
        return
@@ -447,7 +449,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
    if pkg.name == dep_spec.name:
        raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

    type = canonical_deptype(type)
    depflag = dt.canonicalize(type)
    conditions = pkg.dependencies.setdefault(dep_spec.name, {})

    # call this patches here for clarity -- we want patch to be a list,
@@ -477,12 +479,12 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):

    # this is where we actually add the dependency to this package
    if when_spec not in conditions:
        dependency = Dependency(pkg, dep_spec, type=type)
        dependency = Dependency(pkg, dep_spec, depflag=depflag)
        conditions[when_spec] = dependency
    else:
        dependency = conditions[when_spec]
        dependency.spec.constrain(dep_spec, deps=False)
        dependency.type |= set(type)
        dependency.depflag |= depflag

    # apply patches to the dependency
    for execute_patch in patches:
@@ -525,7 +527,7 @@ def _execute_conflicts(pkg):


@directive(("dependencies"))
def depends_on(spec, when=None, type=default_deptype, patches=None):
def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
    """Creates a dict of deps with specs defining when they apply.

    Args:
@@ -861,6 +863,44 @@ def _execute_maintainer(pkg):
    return _execute_maintainer


def _execute_license(pkg, license_identifier: str, when):
    # If when is not specified the license always holds
    when_spec = make_when_spec(when)
    if not when_spec:
        return

    for other_when_spec in pkg.licenses:
        if when_spec.intersects(other_when_spec):
            when_message = ""
            if when_spec != make_when_spec(None):
                when_message = f"when {when_spec}"
            other_when_message = ""
            if other_when_spec != make_when_spec(None):
                other_when_message = f"when {other_when_spec}"
            err_msg = (
                f"{pkg.name} is specified as being licensed as {license_identifier} "
                f"{when_message}, but it is also specified as being licensed under "
                f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict."
            )
            raise OverlappingLicenseError(err_msg)

    pkg.licenses[when_spec] = license_identifier


@directive("licenses")
def license(license_identifier: str, when=None):
    """Add a new license directive, to specify the SPDX identifier the software is
    distributed under.

    Args:
        license_identifier: SPDX identifier specifying the license(s) the software
            is distributed under.
        when: A spec specifying when the license applies.
    """

    return lambda pkg: _execute_license(pkg, license_identifier, when)
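
A hedged sketch of the new directive in use; this belongs in a package.py evaluated by Spack rather than a standalone script, and the package name, URL, and constraint are illustrative:

    from spack.package import *

    class Libexample(Package):
        homepage = "https://example.com"
        url = "https://example.com/libexample-1.0.tar.gz"

        license("Apache-2.0 OR MIT")  # SPDX identifier, applies to every version

Declaring a second license whose when condition intersects an existing one raises OverlappingLicenseError, as enforced by _execute_license above.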
|
||||
|
||||
|
||||
@directive("requirements")
|
||||
def requires(*requirement_specs, policy="one_of", when=None, msg=None):
|
||||
"""Allows a package to request a configuration to be present in all valid solutions.
|
||||
@@ -919,3 +959,7 @@ class DependencyPatchError(DirectiveError):
|
||||
|
||||
class UnsupportedPackageDirective(DirectiveError):
|
||||
"""Raised when an invalid or unsupported package directive is specified."""
|
||||
|
||||
|
||||
class OverlappingLicenseError(DirectiveError):
|
||||
"""Raised when two licenses are declared that apply on overlapping specs."""
|
||||
|
||||
@@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec):
|
||||
_check_concrete(spec)
|
||||
|
||||
projection = spack.projections.get_projection(self.projections, spec)
|
||||
path = spec.format(projection)
|
||||
path = spec.format_path(projection)
|
||||
return str(Path(path))
|
||||
|
||||
def write_spec(self, spec, path):
|
||||
@@ -120,10 +120,8 @@ def write_host_environment(self, spec):
|
||||
versioning. We use it in the case that an analysis later needs to
|
||||
easily access this information.
|
||||
"""
|
||||
from spack.util.environment import get_host_environment_metadata
|
||||
|
||||
env_file = self.env_metadata_path(spec)
|
||||
environ = get_host_environment_metadata()
|
||||
environ = spack.spec.get_host_environment_metadata()
|
||||
with open(env_file, "w") as fd:
|
||||
sjson.dump(environ, fd)
|
||||
|
||||
|
||||
@@ -365,6 +365,7 @@
|
||||
read,
|
||||
root,
|
||||
spack_env_var,
|
||||
spack_env_view_var,
|
||||
update_yaml,
|
||||
)
|
||||
|
||||
@@ -397,5 +398,6 @@
|
||||
"read",
|
||||
"root",
|
||||
"spack_env_var",
|
||||
"spack_env_view_var",
|
||||
"update_yaml",
|
||||
]
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.environment.environment as ev
|
||||
import spack.spec
|
||||
import spack.traverse as traverse
|
||||
@@ -36,7 +37,9 @@ def from_string(s: str) -> "UseBuildCache":
|
||||
def _deptypes(use_buildcache: UseBuildCache):
|
||||
"""What edges should we follow for a given node? If it's a cache-only
|
||||
node, then we can drop build type deps."""
|
||||
return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
|
||||
return (
|
||||
dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
|
||||
)
|
||||
|
||||
|
||||
class DepfileNode:
|
||||
@@ -69,13 +72,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
|
||||
self.adjacency_list: List[DepfileNode] = []
|
||||
self.pkg_buildcache = pkg_buildcache
|
||||
self.deps_buildcache = deps_buildcache
|
||||
self.deptypes_root = _deptypes(pkg_buildcache)
|
||||
self.deptypes_deps = _deptypes(deps_buildcache)
|
||||
self.depflag_root = _deptypes(pkg_buildcache)
|
||||
self.depflag_deps = _deptypes(deps_buildcache)
|
||||
|
||||
def neighbors(self, node):
|
||||
"""Produce a list of spec to follow from node"""
|
||||
deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
|
||||
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
|
||||
depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
|
||||
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
|
||||
|
||||
def accept(self, node):
|
||||
self.adjacency_list.append(
|
||||
|
||||
@@ -28,6 +28,7 @@
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.hash_types as ht
|
||||
@@ -63,6 +64,8 @@
|
||||
#: environment variable used to indicate the active environment
|
||||
spack_env_var = "SPACK_ENV"
|
||||
|
||||
#: environment variable used to indicate the active environment view
|
||||
spack_env_view_var = "SPACK_ENV_VIEW"
|
||||
|
||||
#: currently activated environment
|
||||
_active_environment: Optional["Environment"] = None
|
||||
@@ -403,7 +406,7 @@ def _write_yaml(data, str_or_file):
|
||||
|
||||
def _eval_conditional(string):
|
||||
"""Evaluate conditional definitions using restricted variable scope."""
|
||||
valid_variables = spack.util.environment.get_host_environment()
|
||||
valid_variables = spack.spec.get_host_environment()
|
||||
valid_variables.update({"re": re, "env": os.environ})
|
||||
return eval(string, valid_variables)
|
||||
|
||||
@@ -1395,7 +1398,10 @@ def _concretize_together_where_possible(
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
for result in solver.solve_in_rounds(specs_to_concretize, tests=tests):
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
result = []
|
||||
@@ -1474,11 +1480,12 @@ def _concretize_separately(self, tests=False):
|
||||
self._add_concrete_spec(s, concrete, new=False)
|
||||
|
||||
# Concretize any new user specs that we haven't concretized yet
|
||||
arguments, root_specs = [], []
|
||||
args, root_specs, i = [], [], 0
|
||||
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
|
||||
if uspec not in old_concretized_user_specs:
|
||||
root_specs.append(uspec)
|
||||
arguments.append((uspec_constraints, tests))
|
||||
args.append((i, uspec_constraints, tests))
|
||||
i += 1
|
||||
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
if spack.config.get("config:concretizer", "clingo") == "clingo":
|
||||
@@ -1497,34 +1504,36 @@ def _concretize_separately(self, tests=False):
|
||||
_ = spack.compilers.get_compiler_config()
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(arguments) == 0:
|
||||
if len(args) == 0:
|
||||
return []
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
start = time.time()
|
||||
max_processes = min(
|
||||
len(arguments), # Number of specs
|
||||
spack.util.cpus.determine_number_of_jobs(parallel=True),
|
||||
)
|
||||
num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))
|
||||
|
||||
# TODO: revisit this print as soon as darwin is parallel too
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
msg = "Starting concretization"
|
||||
if sys.platform != "darwin":
|
||||
pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
|
||||
if pool_size > 1:
|
||||
msg = msg + " pool with {0} processes".format(pool_size)
|
||||
if sys.platform not in ("darwin", "win32") and num_procs > 1:
|
||||
msg += f" pool with {num_procs} processes"
|
||||
tty.msg(msg)
|
||||
|
||||
concretized_root_specs = spack.util.parallel.parallel_map(
|
||||
_concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug()
|
||||
)
|
||||
batch = []
|
||||
for i, concrete, duration in spack.util.parallel.imap_unordered(
|
||||
_concretize_task, args, processes=num_procs, debug=tty.is_debug()
|
||||
):
|
||||
batch.append((i, concrete))
|
||||
tty.verbose(f"[{duration:7.2f}s] {root_specs[i]}")
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
batch.sort(key=lambda x: x[0])
|
||||
by_hash = {} # for attaching information on test dependencies
|
||||
for root, (_, concrete) in zip(root_specs, batch):
|
||||
self._add_concrete_spec(root, concrete)
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
|
||||
finish = time.time()
|
||||
tty.msg("Environment concretized in %.2f seconds." % (finish - start))
|
||||
by_hash = {}
|
||||
for abstract, concrete in zip(root_specs, concretized_root_specs):
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
@@ -1536,13 +1545,13 @@ def _concretize_separately(self, tests=False):
|
||||
for h in self.specs_by_hash:
|
||||
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
|
||||
for node in computed_spec.traverse():
|
||||
test_edges = node.edges_to_dependencies(deptype="test")
|
||||
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
|
||||
for current_edge in test_edges:
|
||||
test_dependency = current_edge.spec
|
||||
if test_dependency in current_spec[node.name]:
|
||||
continue
|
||||
current_spec[node.name].add_dependency_edge(
|
||||
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
|
||||
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
results = [
|
||||
@@ -1591,16 +1600,14 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
|
||||
|
||||
@property
|
||||
def default_view(self):
|
||||
if not self.views:
|
||||
raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name))
|
||||
|
||||
if default_view_name not in self.views:
|
||||
raise SpackEnvironmentError(
|
||||
"{0} does not have a default view enabled".format(self.name)
|
||||
)
|
||||
if not self.has_view(default_view_name):
|
||||
raise SpackEnvironmentError(f"{self.name} does not have a default view enabled")
|
||||
|
||||
return self.views[default_view_name]
|
||||
|
||||
def has_view(self, view_name: str) -> bool:
|
||||
return view_name in self.views
|
||||
|
||||
def update_default_view(self, path_or_bool: Union[str, bool]) -> None:
|
||||
"""Updates the path of the default view.
|
||||
|
||||
@@ -1686,62 +1693,34 @@ def check_views(self):
                 "Loading the environment view will require reconcretization." % self.name
             )

-    def _env_modifications_for_default_view(self, reverse=False):
-        all_mods = spack.util.environment.EnvironmentModifications()
+    def _env_modifications_for_view(
+        self, view: ViewDescriptor, reverse: bool = False
+    ) -> spack.util.environment.EnvironmentModifications:
+        try:
+            mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
+        except Exception as e:
+            # Failing to setup spec-specific changes shouldn't be a hard error.
+            tty.warn(
+                "couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
+            )
+            return spack.util.environment.EnvironmentModifications()
+        return mods.reversed() if reverse else mods

-        visited = set()
-
-        errors = []
-        for root_spec in self.concrete_roots():
-            if root_spec in self.default_view and root_spec.installed and root_spec.package:
-                for spec in root_spec.traverse(deptype="run", root=True):
-                    if spec.name in visited:
-                        # It is expected that only one instance of the package
-                        # can be added to the environment - do not attempt to
-                        # add multiple.
-                        tty.debug(
-                            "Not adding {0} to shell modifications: "
-                            "this package has already been added".format(
-                                spec.format("{name}/{hash:7}")
-                            )
-                        )
-                        continue
-                    else:
-                        visited.add(spec.name)
-
-                    try:
-                        mods = uenv.environment_modifications_for_spec(spec, self.default_view)
-                    except Exception as e:
-                        msg = "couldn't get environment settings for %s" % spec.format(
-                            "{name}@{version} /{hash:7}"
-                        )
-                        errors.append((msg, str(e)))
-                        continue
-
-                    all_mods.extend(mods.reversed() if reverse else mods)
-
-        return all_mods, errors
-
-    def add_default_view_to_env(self, env_mod):
-        """
-        Collect the environment modifications to activate an environment using the
-        default view. Removes duplicate paths.
+    def add_view_to_env(
+        self, env_mod: spack.util.environment.EnvironmentModifications, view: str
+    ) -> spack.util.environment.EnvironmentModifications:
+        """Collect the environment modifications to activate an environment using the provided
+        view. Removes duplicate paths.

         Args:
-            env_mod (spack.util.environment.EnvironmentModifications): the environment
-                modifications object that is modified.
-        """
-        if default_view_name not in self.views:
-            # No default view to add to shell
+            env_mod: the environment modifications object that is modified.
+            view: the name of the view to activate."""
+        descriptor = self.views.get(view)
+        if not descriptor:
             return env_mod

-        env_mod.extend(uenv.unconditional_environment_modifications(self.default_view))
-
-        mods, errors = self._env_modifications_for_default_view()
-        env_mod.extend(mods)
-        if errors:
-            for err in errors:
-                tty.warn(*err)
+        env_mod.extend(uenv.unconditional_environment_modifications(descriptor))
+        env_mod.extend(self._env_modifications_for_view(descriptor))

         # deduplicate paths from specs mapped to the same location
         for env_var in env_mod.group_by_name():
@@ -1749,23 +1728,21 @@ def add_default_view_to_env(self, env_mod):

         return env_mod

-    def rm_default_view_from_env(self, env_mod):
-        """
-        Collect the environment modifications to deactivate an environment using the
-        default view. Reverses the action of ``add_default_view_to_env``.
+    def rm_view_from_env(
+        self, env_mod: spack.util.environment.EnvironmentModifications, view: str
+    ) -> spack.util.environment.EnvironmentModifications:
+        """Collect the environment modifications to deactivate an environment using the provided
+        view. Reverses the action of ``add_view_to_env``.

         Args:
-            env_mod (spack.util.environment.EnvironmentModifications): the environment
-                modifications object that is modified.
-        """
-        if default_view_name not in self.views:
-            # No default view to add to shell
+            env_mod: the environment modifications object that is modified.
+            view: the name of the view to deactivate."""
+        descriptor = self.views.get(view)
+        if not descriptor:
             return env_mod

-        env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed())
-
-        mods, _ = self._env_modifications_for_default_view(reverse=True)
-        env_mod.extend(mods)
+        env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed())
+        env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True))

         return env_mod
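add_view_to_env and rm_view_from_env are deliberately symmetric: deactivation extends the same modifications in reversed form, so a full activate/deactivate cycle is a no-op. A toy sketch of that round-trip property (the classes stand in for spack.util.environment.EnvironmentModifications and are not the real API):

class PrependPath:
    # Stand-in for a single environment modification.
    def __init__(self, name, path):
        self.name, self.path = name, path

    def apply(self, env):
        env[self.name] = self.path + ":" + env.get(self.name, "")

    def reversed(self):
        return RemovePath(self.name, self.path)


class RemovePath(PrependPath):
    def apply(self, env):
        parts = [p for p in env.get(self.name, "").split(":") if p and p != self.path]
        env[self.name] = ":".join(parts)

    def reversed(self):
        return PrependPath(self.name, self.path)


env = {}
add = PrependPath("PATH", "/spack/view/bin")  # what add_view_to_env contributes
add.apply(env)
add.reversed().apply(env)                     # what rm_view_from_env contributes
assert env["PATH"] == ""                      # the round trip leaves PATH unchanged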
@@ -2190,7 +2167,7 @@ def _read_lockfile_dict(self, d):
             name, data = reader.name_and_data(node_dict)
             for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                 specs_by_hash[lockfile_key]._add_dependency(
-                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
+                    specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
                 )

         # Traverse the root specs one at a time in the order they appear.
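The switch from deptypes= to depflag=dt.canonicalize(deptypes) reflects dependency types being folded into a single integer bit mask. An illustrative standalone sketch of such a canonicalization (the flag names and values here are made up, not Spack's actual constants in spack.deptypes):

# Illustrative bit-flag encoding of dependency types; the real values
# live in spack.deptypes and may differ.
BUILD, LINK, RUN, TEST = 1, 2, 4, 8
NAME_TO_FLAG = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}


def canonicalize(deptypes):
    """Fold a tuple of deptype names like ("build", "link") into one int."""
    flag = 0
    for name in deptypes:
        flag |= NAME_TO_FLAG[name]
    return flag


assert canonicalize(("build", "link")) == BUILD | LINK
assert canonicalize(("run",)) & RUN  # membership becomes a bitwise test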
@@ -2418,10 +2395,12 @@ def _concretize_from_constraints(spec_constraints, tests=False):
     invalid_constraints.extend(inv_variant_constraints)


-def _concretize_task(packed_arguments):
-    spec_constraints, tests = packed_arguments
+def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
+    index, spec_constraints, tests = packed_arguments
     with tty.SuppressOutput(msg_enabled=False):
-        return _concretize_from_constraints(spec_constraints, tests)
+        start = time.time()
+        spec = _concretize_from_constraints(spec_constraints, tests)
+        return index, spec, time.time() - start


 def make_repo_path(root):
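Returning the input index (plus elapsed time) lets the caller hand _concretize_task to an unordered worker pool and still place each result back in submission order. A minimal standalone sketch of that pattern:

import time
from multiprocessing.pool import ThreadPool


def task(packed):
    # Mirrors _concretize_task's shape: return (index, result, elapsed seconds).
    index, payload = packed
    start = time.time()
    result = payload * 2  # stand-in for the expensive concretization
    return index, result, time.time() - start


inputs = list(enumerate([10, 20, 30]))
results = [None] * len(inputs)
with ThreadPool(2) as pool:
    # imap_unordered yields results in completion order, not input order...
    for index, result, elapsed in pool.imap_unordered(task, inputs):
        results[index] = result  # ...so the index puts each one back in place
print(results)  # [20, 40, 60], regardless of completion order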
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+from typing import Optional

 import llnl.util.tty as tty
 from llnl.util.tty.color import colorize
@@ -13,12 +14,14 @@
 from spack.util.environment import EnvironmentModifications


-def activate_header(env, shell, prompt=None):
+def activate_header(env, shell, prompt=None, view: Optional[str] = None):
     # Construct the commands to run
     cmds = ""
     if shell == "csh":
         # TODO: figure out how to make color work for csh
         cmds += "setenv SPACK_ENV %s;\n" % env.path
+        if view:
+            cmds += "setenv SPACK_ENV_VIEW %s;\n" % view
         cmds += 'alias despacktivate "spack env deactivate";\n'
         if prompt:
             cmds += "if (! $?SPACK_OLD_PROMPT ) "
@@ -29,6 +32,8 @@ def activate_header(env, shell, prompt=None):
             prompt = colorize("@G{%s} " % prompt, color=True)

         cmds += "set -gx SPACK_ENV %s;\n" % env.path
+        if view:
+            cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view
         cmds += "function despacktivate;\n"
         cmds += " spack env deactivate;\n"
         cmds += "end;\n"
@@ -40,15 +45,21 @@ def activate_header(env, shell, prompt=None):
     elif shell == "bat":
         # TODO: Color
         cmds += 'set "SPACK_ENV=%s"\n' % env.path
+        if view:
+            cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
         # TODO: despacktivate
         # TODO: prompt
     elif shell == "pwsh":
         cmds += "$Env:SPACK_ENV='%s'\n" % env.path
+        if view:
+            cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
     else:
         if "color" in os.getenv("TERM", "") and prompt:
             prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)

         cmds += "export SPACK_ENV=%s;\n" % env.path
+        if view:
+            cmds += "export SPACK_ENV_VIEW=%s;\n" % view
         cmds += "alias despacktivate='spack env deactivate';\n"
         if prompt:
             cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n"
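Each shell branch above emits the same SPACK_ENV_VIEW assignment in its own syntax. A condensed standalone sketch of that dispatch (templates simplified; the real function also handles prompts and aliases):

# Per-shell rendering of one environment-variable assignment; templates are
# simplified stand-ins for the branches in activate_header above.
TEMPLATES = {
    "csh": "setenv SPACK_ENV_VIEW %s;\n",
    "fish": "set -gx SPACK_ENV_VIEW %s;\n",
    "bat": 'set "SPACK_ENV_VIEW=%s"\n',
    "pwsh": "$Env:SPACK_ENV_VIEW='%s'\n",
    "sh": "export SPACK_ENV_VIEW=%s;\n",  # the default bourne-style branch
}


def view_assignment(shell, view):
    return TEMPLATES.get(shell, TEMPLATES["sh"]) % view if view else ""


print(view_assignment("fish", "default"), end="")  # set -gx SPACK_ENV_VIEW default;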
@@ -66,12 +77,14 @@ def deactivate_header(shell):
     cmds = ""
     if shell == "csh":
         cmds += "unsetenv SPACK_ENV;\n"
+        cmds += "unsetenv SPACK_ENV_VIEW;\n"
         cmds += "if ( $?SPACK_OLD_PROMPT ) "
         cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
         cmds += " unsetenv SPACK_OLD_PROMPT';\n"
         cmds += "unalias despacktivate;\n"
     elif shell == "fish":
         cmds += "set -e SPACK_ENV;\n"
+        cmds += "set -e SPACK_ENV_VIEW;\n"
         cmds += "functions -e despacktivate;\n"
         #
         # NOTE: Not changing fish_prompt (above) => no need to restore it here.
@@ -79,14 +92,19 @@ def deactivate_header(shell):
     elif shell == "bat":
         # TODO: Color
         cmds += 'set "SPACK_ENV="\n'
+        cmds += 'set "SPACK_ENV_VIEW="\n'
         # TODO: despacktivate
         # TODO: prompt
     elif shell == "pwsh":
         cmds += "Set-Item -Path Env:SPACK_ENV\n"
+        cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
     else:
         cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
         cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
         cmds += "fi;\n"
+        cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n"
+        cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n"
+        cmds += "fi;\n"
         cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n"
         cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n"
         cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n"
@@ -100,24 +118,23 @@ def deactivate_header(shell):
     return cmds


-def activate(env, use_env_repo=False, add_view=True):
-    """
-    Activate an environment and append environment modifications
+def activate(
+    env: ev.Environment, use_env_repo=False, view: Optional[str] = "default"
+) -> EnvironmentModifications:
+    """Activate an environment and append environment modifications

     To activate an environment, we add its configuration scope to the
     existing Spack configuration, and we set active to the current
     environment.

     Arguments:
-        env (spack.environment.Environment): the environment to activate
-        use_env_repo (bool): use the packages exactly as they appear in the
-            environment's repository
-        add_view (bool): generate commands to add view to path variables
+        env: the environment to activate
+        use_env_repo: use the packages exactly as they appear in the environment's repository
+        view: generate commands to add runtime environment variables for named view

     Returns:
         spack.util.environment.EnvironmentModifications: Environment variables
-        modifications to activate environment.
-    """
+        modifications to activate environment."""
     ev.activate(env, use_env_repo=use_env_repo)

     env_mods = EnvironmentModifications()
@@ -129,9 +146,9 @@ def activate(env, use_env_repo=False, add_view=True):
     # become PATH variables.
     #
     try:
-        if add_view and ev.default_view_name in env.views:
+        if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():
-                env.add_default_view_to_env(env_mods)
+                env.add_view_to_env(env_mods, view)
     except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
         tty.error(e)
         tty.die(
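Callers now select a view by name rather than by a boolean, and view=None skips view activation entirely. A hedged usage sketch: the environment name "myenv" and view name "debug" are made up, and it assumes a session where Spack's modules are importable.

# Hypothetical usage of the new activate() signature; "myenv" and the
# "debug" view are invented names, not ones that ship with Spack.
import spack.environment as ev
import spack.environment.shell as env_shell

env = ev.read("myenv")                        # load an existing named environment
mods = env_shell.activate(env, view="debug")  # activate with a named view
mods.apply_modifications()                    # apply to this process's os.environ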
@@ -145,17 +162,15 @@ def activate(env, use_env_repo=False, add_view=True):
     return env_mods


-def deactivate():
-    """
-    Deactivate an environment and collect corresponding environment modifications.
+def deactivate() -> EnvironmentModifications:
+    """Deactivate an environment and collect corresponding environment modifications.

     Note: unloads the environment in its current state, not in the state it was
     loaded in, meaning that specs that were removed from the spack environment
     after activation are not unloaded.

     Returns:
-        spack.util.environment.EnvironmentModifications: Environment variables
-        modifications to activate environment.
+        Environment variables modifications to activate environment.
     """
     env_mods = EnvironmentModifications()
     active = ev.active_environment()
@@ -163,10 +178,12 @@ def deactivate():
     if active is None:
         return env_mods

-    if ev.default_view_name in active.views:
+    active_view = os.getenv(ev.spack_env_view_var)
+
+    if active_view and active.has_view(active_view):
         try:
             with spack.store.STORE.db.read_transaction():
-                active.rm_default_view_from_env(env_mods)
+                active.rm_view_from_env(env_mods, active_view)
         except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
             tty.warn(e)
             tty.warn(
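Reading the name back from SPACK_ENV_VIEW means the shell, not Spack's in-memory state, records which view was activated, so deactivation reverses the right view even in a fresh process. A toy round-trip sketch (only the variable name comes from the diff; the functions are stand-ins):

import os

SPACK_ENV_VIEW_VAR = "SPACK_ENV_VIEW"  # the name exported by activate_header above


def fake_activate(view_name):
    # Activation exports the chosen view name into the environment.
    os.environ[SPACK_ENV_VIEW_VAR] = view_name


def fake_deactivate(available_views):
    # Deactivation discovers the view from the environment, not from memory.
    active_view = os.getenv(SPACK_ENV_VIEW_VAR)
    if active_view and active_view in available_views:
        return f"reversing modifications for view {active_view!r}"
    return "no view to reverse"


fake_activate("default")
print(fake_deactivate({"default", "debug"}))  # reversing ... 'default'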
@@ -128,3 +128,7 @@ def __init__(self, provided, required, constraint_type):
         self.provided = provided
         self.required = required
         self.constraint_type = constraint_type
+
+
+class FetchError(SpackError):
+    """Superclass for fetch-related errors."""
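Hoisting FetchError into spack.error lets low-level modules raise and catch it without importing spack.util.web, the usual way to break an import cycle. A minimal sketch of the pattern with toy module roles:

# errors.py (toy counterpart of spack/error.py): the shared base lives in a
# leaf module with no heavyweight imports.
class SpackError(Exception):
    """Toy stand-in for the project-wide error base."""


class FetchError(SpackError):
    """Superclass for fetch-related errors."""


# fetcher.py (toy counterpart of fetch_strategy.py) then subclasses it:
class FailedDownloadError(FetchError):
    """Raised when a download fails."""


try:
    raise FailedDownloadError("https://example.com/archive.tar.gz")
except FetchError as err:  # callers catch the shared base class
    print(f"fetch failed: {err}")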
@@ -31,9 +31,11 @@
 import urllib.parse
 from typing import List, Optional

+import llnl.url
 import llnl.util
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.string import comma_and, quote
 from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
 from llnl.util.symlink import symlink
@@ -46,9 +48,8 @@
 import spack.util.web as web_util
 import spack.version
+import spack.version.git_ref_lookup
-from spack.util.compression import decompressor_for, extension_from_path
+from spack.util.compression import decompressor_for
 from spack.util.executable import CommandNotFoundError, which
-from spack.util.string import comma_and, quote

 #: List of all fetch strategies, created by FetchStrategy metaclass.
 all_strategies = []
@@ -400,7 +401,7 @@ def _fetch_curl(self, url):

         try:
             web_util.check_curl_code(curl.returncode)
-        except web_util.FetchError as err:
+        except spack.error.FetchError as err:
             raise spack.fetch_strategy.FailedDownloadError(url, str(err))

         self._check_headers(headers)
@@ -441,7 +442,7 @@ def expand(self):

         # TODO: replace this by mime check.
         if not self.extension:
-            self.extension = spack.url.determine_url_file_extension(self.url)
+            self.extension = llnl.url.determine_url_file_extension(self.url)

         if self.stage.expanded:
             tty.debug("Source already staged to %s" % self.stage.source_path)
@@ -570,7 +571,7 @@ def expand(self):

     @_needs_stage
     def archive(self, destination, **kwargs):
-        assert extension_from_path(destination) == "tar.gz"
+        assert llnl.url.extension_from_path(destination) == "tar.gz"
         assert self.stage.source_path.startswith(self.stage.path)

         tar = which("tar", required=True)
@@ -733,7 +734,11 @@ def version_from_git(git_exe):
     @property
     def git(self):
         if not self._git:
-            self._git = spack.util.git.git()
+            try:
+                self._git = spack.util.git.git(required=True)
+            except CommandNotFoundError as exc:
+                tty.error(str(exc))
+                raise

             # Disable advice for a quieter fetch
             # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
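The new git property surfaces a missing executable early: require it at lookup time, log the failure for the user, and re-raise so callers still see the original exception. A standalone sketch using shutil.which in place of Spack's executable finder:

import logging
import shutil

logging.basicConfig(level=logging.ERROR)


class CommandNotFoundError(Exception):
    """Toy stand-in for spack.util.executable.CommandNotFoundError."""


def find_command(name, required=False):
    path = shutil.which(name)
    if path is None and required:
        raise CommandNotFoundError(f"'{name}' is required but was not found in PATH")
    return path


def get_git():
    try:
        return find_command("git", required=True)
    except CommandNotFoundError as exc:
        logging.error(str(exc))  # log for the user...
        raise                    # ...but let callers handle the failure too


print(get_git())  # prints the git path, or logs and raises if git is absent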
@@ -1289,7 +1294,7 @@ def fetch(self):

         parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "s3":
-            raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
+            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

         tty.debug("Fetching {0}".format(self.url))
@@ -1336,7 +1341,7 @@ def fetch(self):

         parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "gs":
-            raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
+            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

         tty.debug("Fetching {0}".format(self.url))
@@ -1430,7 +1435,7 @@ def from_kwargs(**kwargs):
     on attribute names (e.g., ``git``, ``hg``, etc.)

     Raises:
-        spack.util.web.FetchError: If no ``fetch_strategy`` matches the args.
+        spack.error.FetchError: If no ``fetch_strategy`` matches the args.
     """
     for fetcher in all_strategies:
         if fetcher.matches(kwargs):
@@ -1537,7 +1542,7 @@ def for_package_version(pkg, version=None):
     # if it's a commit, we must use a GitFetchStrategy
     if isinstance(version, spack.version.GitVersion):
         if not hasattr(pkg, "git"):
-            raise web_util.FetchError(
+            raise spack.error.FetchError(
                 f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
             )
         # Populate the version with comparisons to other commits
@@ -1687,11 +1692,11 @@ def destroy(self):
         shutil.rmtree(self.root, ignore_errors=True)


-class NoCacheError(web_util.FetchError):
+class NoCacheError(spack.error.FetchError):
     """Raised when there is no cached archive for a package."""


-class FailedDownloadError(web_util.FetchError):
+class FailedDownloadError(spack.error.FetchError):
     """Raised when a download fails."""

     def __init__(self, url, msg=""):
@@ -1699,23 +1704,23 @@ def __init__(self, url, msg=""):
         self.url = url


-class NoArchiveFileError(web_util.FetchError):
+class NoArchiveFileError(spack.error.FetchError):
     """Raised when an archive file is expected but none exists."""


-class NoDigestError(web_util.FetchError):
+class NoDigestError(spack.error.FetchError):
     """Raised after attempt to checksum when URL has no digest."""


-class ExtrapolationError(web_util.FetchError):
+class ExtrapolationError(spack.error.FetchError):
     """Raised when we can't extrapolate a version for a package."""


-class FetcherConflict(web_util.FetchError):
+class FetcherConflict(spack.error.FetchError):
     """Raised for packages with invalid fetch attributes."""


-class InvalidArgsError(web_util.FetchError):
+class InvalidArgsError(spack.error.FetchError):
     """Raised when a version can't be deduced from a set of arguments."""

     def __init__(self, pkg=None, version=None, **args):
@@ -1728,11 +1733,11 @@ def __init__(self, pkg=None, version=None, **args):
         super().__init__(msg, long_msg)


-class ChecksumError(web_util.FetchError):
+class ChecksumError(spack.error.FetchError):
     """Raised when archive fails to checksum."""


-class NoStageError(web_util.FetchError):
+class NoStageError(spack.error.FetchError):
     """Raised when fetch operations are called before set_stage()."""

     def __init__(self, method):
@@ -500,7 +500,7 @@ def get_projection_for_spec(self, spec):

         proj = spack.projections.get_projection(self.projections, locator_spec)
         if proj:
-            return os.path.join(self._root, locator_spec.format(proj))
+            return os.path.join(self._root, locator_spec.format_path(proj))
         return self._root

     def get_all_specs(self):
@@ -776,7 +776,7 @@ def get_relative_projection_for_spec(self, spec):
             spec = spec.package.extendee_spec

         p = spack.projections.get_projection(self.projections, spec)
-        return spec.format(p) if p else ""
+        return spec.format_path(p) if p else ""

     def get_projection_for_spec(self, spec):
         """
@@ -791,7 +791,7 @@ def get_projection_for_spec(self, spec):

         proj = spack.projections.get_projection(self.projections, spec)
         if proj:
-            return os.path.join(self._root, spec.format(proj))
+            return os.path.join(self._root, spec.format_path(proj))
         return self._root
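format_path differs from format in that interpolated values are made safe to use as path components before they are joined onto the view root. A toy sketch of the distinction (the sanitization rule here is illustrative, not the real implementation on spack.spec.Spec):

def toy_format(template, attrs):
    """Plain interpolation, like Spec.format: values go in verbatim."""
    return template.format(**attrs)


def toy_format_path(template, attrs):
    # Path-safe interpolation: interpolated values cannot smuggle in path
    # separators, so a value never escapes its projected path component.
    safe = {k: str(v).replace("/", "_") for k, v in attrs.items()}
    return template.format(**safe)


attrs = {"name": "zlib", "version": "1.2.13/../../etc"}  # hostile value
print(toy_format("{name}-{version}", attrs))       # zlib-1.2.13/../../etc
print(toy_format_path("{name}-{version}", attrs))  # zlib-1.2.13_.._.._etc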
Some files were not shown because too many files have changed in this diff.