Compare commits
800 Commits
v0.20.0...develop-20
Commits in this comparison: a864108bc1 through e1344b5497.
.github/dependabot.yml (vendored): 5 changed lines
@@ -5,3 +5,8 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
# Requirements to build documentation
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/lib/spack/docs"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
.github/workflows/audit.yaml (vendored): 6 changed lines
@@ -19,8 +19,8 @@ jobs:
|
||||
package-audits:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
.github/workflows/bootstrap.yml (vendored): 22 changed lines
@@ -24,7 +24,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -158,7 +158,7 @@ jobs:
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -179,7 +179,7 @@ jobs:
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -ex
|
||||
@@ -204,7 +204,7 @@ jobs:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -247,7 +247,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -283,7 +283,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -316,7 +316,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -333,7 +333,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
.github/workflows/build-containers.yml (vendored): 14 changed lines
@@ -49,14 +49,14 @@ jobs:
|
||||
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
|
||||
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
|
||||
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
|
||||
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
|
||||
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -92,13 +92,13 @@ jobs:
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
|
||||
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
|
||||
uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -106,13 +106,13 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
|
||||
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
|
||||
.github/workflows/ci.yaml (vendored): 2 changed lines
@@ -35,7 +35,7 @@ jobs:
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
.github/workflows/nightly-win-builds.yml (vendored, new file): 31 changed lines
@@ -0,0 +1,31 @@
|
||||
name: Windows Paraview Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 2 * * *' # Run at 2 am
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
|
||||
|
||||
|
||||
jobs:
|
||||
build-paraview-deps:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools coverage
|
||||
- name: Build Test
|
||||
run: |
|
||||
spack compiler find
|
||||
spack external find cmake ninja win-sdk win-wdk wgl msmpi
|
||||
spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
|
||||
exit 0
|
||||
.github/workflows/unit_tests.yaml (vendored): 27 changed lines
@@ -47,10 +47,10 @@ jobs:
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -87,17 +87,17 @@ jobs:
|
||||
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
# Test shell integration
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -118,7 +118,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
|
||||
@@ -133,10 +133,11 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd spack-test
|
||||
@@ -151,10 +152,10 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -175,7 +176,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
# Run unit tests on MacOS
|
||||
@@ -185,10 +186,10 @@ jobs:
|
||||
matrix:
|
||||
python-version: ["3.10"]
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -210,6 +211,6 @@ jobs:
|
||||
$(which spack) solve zlib
|
||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||
$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,macos
|
||||
|
||||
.github/workflows/valid-style.yml (vendored): 12 changed lines
@@ -18,8 +18,8 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -35,10 +35,10 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -68,10 +68,11 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd spack-test
|
||||
@@ -80,6 +81,7 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack debug report
|
||||
spack -d bootstrap now --dev
|
||||
spack style -t black
|
||||
spack unit-test -V
|
||||
|
||||
.github/workflows/windows_python.yml (vendored): 16 changed lines
@@ -15,10 +15,10 @@ jobs:
|
||||
unit-tests:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -33,16 +33,16 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,windows
|
||||
unit-tests-cmd:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -57,16 +57,16 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
|
||||
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
with:
|
||||
flags: unittests,windows
|
||||
build-abseil:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
|
||||
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
|
||||
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
apt_packages:
|
||||
- graphviz
|
||||
tools:
|
||||
python: "3.11"
|
||||
|
||||
sphinx:
|
||||
configuration: lib/spack/docs/conf.py
|
||||
fail_on_warning: true
|
||||
|
||||
python:
|
||||
version: 3.7
|
||||
install:
|
||||
- requirements: lib/spack/docs/requirements.txt
|
||||
|
||||
CHANGELOG.md: 218 changed lines
@@ -1,3 +1,221 @@
|
||||
# v0.20.0 (2023-05-21)
|
||||
|
||||
`v0.20.0` is a major feature release.
|
||||
|
||||
## Features in this release
|
||||
|
||||
1. **`requires()` directive and enhanced package requirements**
|
||||
|
||||
We've added some more enhancements to requirements in Spack (#36286).
|
||||
|
||||
There is a new `requires()` directive for packages. `requires()` is the opposite of
|
||||
`conflicts()`. You can use it to impose constraints on this package when certain
|
||||
conditions are met:
|
||||
|
||||
```python
|
||||
requires(
|
||||
"%apple-clang",
|
||||
when="platform=darwin",
|
||||
msg="This package builds only with clang on macOS"
|
||||
)
|
||||
```
|
||||
|
||||
More on this in [the docs](
|
||||
https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).
|
||||
|
||||
You can also now add a `when:` clause to `requires:` in your `packages.yaml`
|
||||
configuration or in an environment:
|
||||
|
||||
```yaml
|
||||
packages:
|
||||
openmpi:
|
||||
require:
|
||||
- any_of: ["%gcc"]
|
||||
when: "@:4.1.4"
|
||||
message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
|
||||
```
|
||||
|
||||
More details can be found [here](
|
||||
https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements)
|
||||
|
||||
2. **Exact versions**
|
||||
|
||||
Spack did not previously have a way to distinguish a version if it was a prefix of
|
||||
some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
|
||||
can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
|
||||
to patch *only* the `3.2` version of a package. The new syntax is described in [the docs](
|
||||
https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).
|
||||
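For instance, a package that needs a fix applied to the `3.2` release only might use the
exact-version syntax in its `patch` directive (a sketch with a hypothetical patch file,
not taken from a real package):

```python
# Applies only when the version is exactly 3.2,
# not 3.2.1, 3.2.2, or any other 3.2.x release.
patch("fix-3.2-build.patch", when="@=3.2")
```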
|
||||
Generally, when writing packages, you should prefer to use ranges like `@3.2` over
|
||||
the specific versions, as this allows the concretizer more leeway when selecting
|
||||
versions of dependencies. More details and recommendations are in the [packaging guide](
|
||||
https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).
|
||||
|
||||
See #36273 for full details on the version refactor.
|
||||
|
||||
3. **New testing interface**
|
||||
|
||||
Writing package tests is now much simpler with a new [test interface](
|
||||
https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).
|
||||
|
||||
Writing a test is now as easy as adding a method that starts with `test_`:
|
||||
|
||||
```python
|
||||
class MyPackage(Package):
|
||||
...
|
||||
|
||||
def test_always_fails(self):
|
||||
"""use assert to always fail"""
|
||||
assert False
|
||||
|
||||
def test_example(self):
|
||||
"""run installed example"""
|
||||
example = which(self.prefix.bin.example)
|
||||
example()
|
||||
```
|
||||
|
||||
You can use Python's native `assert` statement to implement your checks -- no more
|
||||
need to fiddle with `run_test` or other test framework methods. Spack will
|
||||
introspect the class and run `test_*` methods when you run `spack test`.
|
||||
|
||||
4. **More stable concretization**
|
||||
|
||||
* Now, `spack concretize` will *only* concretize the new portions of the environment
|
||||
and will not change existing parts of an environment unless you specify `--force`.
|
||||
This has always been true for `unify:false`, but not for `unify:true` and
|
||||
`unify:when_possible` environments. Now it is true for all of them (#37438, #37681).
|
||||
|
||||
* The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
|
||||
That is, it will always treat the *roots* of your environment as it would with
|
||||
`--fresh`. This allows you to upgrade just the roots of your environment while
|
||||
keeping everything else stable (#30990).
|
||||
|
||||
5. **Weekly develop snapshot releases**
|
||||
|
||||
Since last year, we have maintained a buildcache of `develop` at
|
||||
https://binaries.spack.io/develop, but the cache can grow to contain so many builds
|
||||
as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
|
||||
add a corresponding tag to the Spack repository. So, you can use a stack from a specific
|
||||
day. There are now tags in the spack repository like:
|
||||
|
||||
* `develop-2023-05-14`
|
||||
* `develop-2023-05-18`
|
||||
|
||||
that correspond to build caches like:
|
||||
|
||||
* https://binaries.spack.io/develop-2023-05-14/e4s
|
||||
* https://binaries.spack.io/develop-2023-05-18/e4s
|
||||
|
||||
We plan to store these snapshot releases weekly.
|
||||
|
||||
6. **Specs in buildcaches can be referenced by hash.**
|
||||
|
||||
* Previously, you could run `spack buildcache list` and see the hashes in
|
||||
buildcaches, but referring to them by hash would fail.
|
||||
* You can now run commands like `spack spec` and `spack install` and refer to
|
||||
buildcache hashes directly, e.g. `spack install /abc123` (#35042)
|
||||
|
||||
7. **New package and buildcache index websites**
|
||||
|
||||
Our public websites for searching packages have been completely revamped and updated.
|
||||
You can check them out here:
|
||||
|
||||
* *Package Index*: https://packages.spack.io
|
||||
* *Buildcache Index*: https://cache.spack.io
|
||||
|
||||
Both are searchable and more interactive than before. Currently major releases are
|
||||
shown; UI for browsing `develop` snapshots is coming soon.
|
||||
|
||||
8. **Default CMake and Meson build types are now Release**
|
||||
|
||||
Spack has historically defaulted to building with optimization and debugging, but
|
||||
packages like `llvm` can be enormous with debug turned on. Our default build type for
|
||||
all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:
|
||||
|
||||
* much smaller binaries;
|
||||
* higher default optimization level; and
|
||||
* defining `NDEBUG` disables assertions, which may lead to further speedups.
|
||||
|
||||
You can still get the old behavior back through requirements and package preferences.
|
||||
|
||||
## Other new commands and directives
|
||||
|
||||
* `spack checksum` can automatically add new versions to a package (#24532)
|
||||
* new command: `spack pkg grep` to easily search package files (#34388)
|
||||
* New `maintainers` directive (#35083); see the sketch after this list
|
||||
* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
|
||||
* Allow using `-j` to control the parallelism of concretization (#37608)
|
||||
* Add `--exclude` option to 'spack external find' (#35013)
|
||||
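The `maintainers` directive noted in the list above is used inside a package class; here is
a minimal sketch with hypothetical GitHub handles:

```python
from spack.package import *


class Example(Package):
    """Hypothetical package illustrating the maintainers directive."""

    homepage = "https://www.example.com"

    # GitHub usernames of the people responsible for this recipe
    maintainers("alice", "bob")
```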
|
||||
## Other new features of note
|
||||
|
||||
* editing: add higher-precedence `SPACK_EDITOR` environment variable
|
||||
* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
|
||||
supporting Python 3.6. (#31091, #24885, #37008).
|
||||
* Requirements and preferences should not define (non-git) versions (#37687, #37747)
|
||||
* Environments now store spack version/commit in `spack.lock` (#32801)
|
||||
* User can specify the name of the `packages` subdirectory in repositories (#36643)
|
||||
* Add container images supporting RHEL alternatives (#36713)
|
||||
* make version(...) kwargs explicit (#36998)
|
||||
|
||||
## Notable refactors
|
||||
|
||||
* buildcache create: reproducible tarballs (#35623)
|
||||
* Bootstrap most of Spack dependencies using environments (#34029)
|
||||
* Split `satisfies(..., strict=True/False)` into two functions (#35681)
|
||||
* spack install: simplify behavior when inside environments (#35206)
|
||||
|
||||
## Binary cache and stack updates
|
||||
|
||||
* Major simplification of CI boilerplate in stacks (#34272, #36045)
|
||||
* Many improvements to our CI pipeline's reliability
|
||||
|
||||
## Removals, Deprecations, and Disablements
|
||||
* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
|
||||
* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
|
||||
supports Python 3.6 and higher.
|
||||
* Deprecated target names are no longer recognized by Spack. Use generic names instead:
|
||||
* `graviton` is now `cortex_a72`
|
||||
* `graviton2` is now `neoverse_n1`
|
||||
* `graviton3` is now `neoverse_v1`
|
||||
* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
|
||||
removed in this release. Use `exclude` and `include` instead.
|
||||
* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
|
||||
any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
|
||||
* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
|
||||
* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use `spack
|
||||
--bootstrap find` instead (#33964).
|
||||
* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
|
||||
deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
|
||||
* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
|
||||
mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
|
||||
replaced by positional arguments (#37457).
|
||||
* Deprecate `env:` as top level environment key (#37424)
|
||||
* Deprecate `buildcache create --rel` and `buildcache install --allow-root` (#37285)
|
||||
* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been
|
||||
removed (#37425). This was deprecated in `v0.15` (#10556).
|
||||
|
||||
## Notable Bugfixes
|
||||
|
||||
* bugfix: don't fetch package metadata for unknown concrete specs (#36990)
|
||||
* Improve package source code context display on error (#37655)
|
||||
* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
|
||||
* `installer.py`: drop build edges of installed packages by default (#36707)
|
||||
* Bugfix: package requirements with git commits (#35057, #36347)
|
||||
* Package requirements: allow single specs in requirement lists (#36258)
|
||||
* conditional variant values: allow boolean (#33939)
|
||||
* spack uninstall: follow run/link edges on --dependents (#34058)
|
||||
|
||||
## Spack community stats
|
||||
|
||||
* 7,179 total packages, 499 new since `v0.19.0`
|
||||
* 329 new Python packages
|
||||
* 31 new R packages
|
||||
* 336 people contributed to this release
|
||||
* 317 committers to packages
|
||||
* 62 committers to core
|
||||
|
||||
|
||||
# v0.19.1 (2023-02-07)
|
||||
|
||||
### Spack Bugfixes
|
||||
|
||||
@@ -214,7 +214,7 @@ goto :end_switch
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
|
||||
bin/spack.ps1 (new file): 132 changed lines
@@ -0,0 +1,132 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
# #######################################################################
|
||||
|
||||
function Compare-CommonArgs {
|
||||
$CMDArgs = $args[0]
|
||||
# These arguments take precedence and call for no further parsing of arguments
|
||||
# invoke actual Spack entrypoint with that context and exit after
|
||||
"--help", "-h", "--version", "-V" | ForEach-Object {
|
||||
$arg_opt = $_
|
||||
if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
|
||||
return $true
|
||||
}
|
||||
}
|
||||
return $false
|
||||
}
|
||||
|
||||
function Read-SpackArgs {
|
||||
$SpackCMD_params = @()
|
||||
$SpackSubCommand = $NULL
|
||||
$SpackSubCommandArgs = @()
|
||||
$args_ = $args[0]
|
||||
$args_ | ForEach-Object {
|
||||
if (!$SpackSubCommand) {
|
||||
if($_.SubString(0,1) -eq "-")
|
||||
{
|
||||
$SpackCMD_params += $_
|
||||
}
|
||||
else{
|
||||
$SpackSubCommand = $_
|
||||
}
|
||||
}
|
||||
else{
|
||||
$SpackSubCommandArgs += $_
|
||||
}
|
||||
}
|
||||
return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
|
||||
}
|
||||
|
||||
function Invoke-SpackCD {
|
||||
if (Compare-CommonArgs $SpackSubCommandArgs) {
|
||||
python $Env:SPACK_ROOT/bin/spack cd -h
|
||||
}
|
||||
else {
|
||||
$LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
|
||||
if (($NULL -ne $LOC)){
|
||||
if ( Test-Path -Path $LOC){
|
||||
Set-Location $LOC
|
||||
}
|
||||
else{
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
else {
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Invoke-SpackEnv {
|
||||
if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
|
||||
python $Env:SPACK_ROOT/bin/spack env -h
|
||||
}
|
||||
else {
|
||||
$SubCommandSubCommand = $SpackSubCommandArgs[0]
|
||||
$SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
|
||||
switch ($SubCommandSubCommand) {
|
||||
"activate" {
|
||||
if (Compare-CommonArgs $SubCommandSubCommandArgs) {
|
||||
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
|
||||
}
|
||||
elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
|
||||
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
|
||||
}
|
||||
elseif (!$SubCommandSubCommandArgs) {
|
||||
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
|
||||
}
|
||||
else {
|
||||
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
|
||||
$ExecutionContext.InvokeCommand($SpackEnv)
|
||||
}
|
||||
}
|
||||
"deactivate" {
|
||||
if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
|
||||
python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
|
||||
}
|
||||
elseif($SubCommandSubCommandArgs) {
|
||||
python $Env:SPACK_ROOT/bin/spack env deactivate -h
|
||||
}
|
||||
else {
|
||||
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
|
||||
$ExecutionContext.InvokeCommand($SpackEnv)
|
||||
}
|
||||
}
|
||||
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Invoke-SpackLoad {
|
||||
if (Compare-CommonArgs $SpackSubCommandArgs) {
|
||||
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
|
||||
}
|
||||
elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
|
||||
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
|
||||
}
|
||||
else {
|
||||
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
|
||||
$ExecutionContext.InvokeCommand($SpackEnv)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
|
||||
|
||||
if (Compare-CommonArgs $SpackCMD_params) {
|
||||
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
|
||||
exit $LASTEXITCODE
|
||||
}
|
||||
|
||||
# Process Spack commands with special conditions
|
||||
# all other commands are piped directly to Spack
|
||||
switch($SpackSubCommand)
|
||||
{
|
||||
"cd" {Invoke-SpackCD}
|
||||
"env" {Invoke-SpackEnv}
|
||||
"load" {Invoke-SpackLoad}
|
||||
"unload" {Invoke-SpackLoad}
|
||||
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
|
||||
}
|
||||
lib/spack/docs/_pygments/style.py (new file): 16 changed lines
@@ -0,0 +1,16 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
# We use our own extension of the default style with a few modifications
|
||||
from pygments.styles.default import DefaultStyle
|
||||
from pygments.token import Generic
|
||||
|
||||
|
||||
class SpackStyle(DefaultStyle):
|
||||
styles = DefaultStyle.styles.copy()
|
||||
background_color = "#f4f4f8"
|
||||
styles[Generic.Output] = "#355"
|
||||
styles[Generic.Prompt] = "bold #346ec9"
|
||||
@@ -149,7 +149,6 @@ def setup(sphinx):
|
||||
# Get nice vector graphics
|
||||
graphviz_output_format = "svg"
|
||||
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
@@ -233,30 +232,8 @@ def setup(sphinx):
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
# show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
# We use our own extension of the default style with a few modifications
|
||||
from pygments.style import Style
|
||||
from pygments.styles.default import DefaultStyle
|
||||
from pygments.token import Comment, Generic, Text
|
||||
|
||||
|
||||
class SpackStyle(DefaultStyle):
|
||||
styles = DefaultStyle.styles.copy()
|
||||
background_color = "#f4f4f8"
|
||||
styles[Generic.Output] = "#355"
|
||||
styles[Generic.Prompt] = "bold #346ec9"
|
||||
|
||||
|
||||
import pkg_resources
|
||||
|
||||
dist = pkg_resources.Distribution(__file__)
|
||||
sys.path.append(".") # make 'conf' module findable
|
||||
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
|
||||
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
|
||||
pkg_resources.working_set.add(dist)
|
||||
|
||||
pygments_style = "spack"
|
||||
sys.path.append("./_pygments")
|
||||
pygments_style = "style.SpackStyle"
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
# modindex_common_prefix = []
|
||||
@@ -341,16 +318,15 @@ class SpackStyle(DefaultStyle):
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = "Spackdoc"
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
# 'papersize': 'letterpaper',
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
# 'pointsize': '10pt',
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
# 'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
|
||||
@@ -143,6 +143,26 @@ The OS that are currently supported are summarized in the table below:
|
||||
* - Amazon Linux 2
|
||||
- ``amazonlinux:2``
|
||||
- ``spack/amazon-linux``
|
||||
* - AlmaLinux 8
|
||||
- ``almalinux:8``
|
||||
- ``spack/almalinux8``
|
||||
* - AlmaLinux 9
|
||||
- ``almalinux:9``
|
||||
- ``spack/almalinux9``
|
||||
* - Rocky Linux 8
|
||||
- ``rockylinux:8``
|
||||
- ``spack/rockylinux8``
|
||||
* - Rocky Linux 9
|
||||
- ``rockylinux:9``
|
||||
- ``spack/rockylinux9``
|
||||
* - Fedora Linux 37
|
||||
- ``fedora:37``
|
||||
- ``spack/fedora37``
|
||||
* - Fedora Linux 38
|
||||
- ``fedora:38``
|
||||
- ``spack/fedora38``
|
||||
|
||||
|
||||
|
||||
All the images are tagged with the corresponding release of Spack:
|
||||
|
||||
@@ -616,7 +636,7 @@ to customize the generation of container recipes:
|
||||
- No
|
||||
* - ``os_packages:command``
|
||||
- Tool used to manage system packages
|
||||
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
|
||||
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
|
||||
- Only with custom base images
|
||||
* - ``os_packages:update``
|
||||
- Whether or not to update the list of available packages
|
||||
|
||||
@@ -916,9 +916,9 @@ function, as shown in the example below:
|
||||
.. code-block:: yaml
|
||||
|
||||
projections:
|
||||
zlib: {name}-{version}
|
||||
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
|
||||
all: {name}-{version}/{compiler.name}-{compiler.version}
|
||||
zlib: "{name}-{version}"
|
||||
^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
|
||||
all: "{name}-{version}/{compiler.name}-{compiler.version}"
|
||||
|
||||
The entries in the projections configuration file must all be either
|
||||
specs or the keyword ``all``. For each spec, the projection used will
|
||||
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
|
||||
example/push/%: example/install/%
|
||||
@mkdir -p $(dir $@)
|
||||
$(info About to push $(SPEC) to a buildcache)
|
||||
$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
|
||||
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
|
||||
@touch $@
|
||||
|
||||
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
|
||||
$(info Updating the buildcache index)
|
||||
$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
|
||||
$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
|
||||
$(info Done!)
|
||||
@touch $@
|
||||
|
||||
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load gcc-4.9.0
|
||||
$ module load gcc/4.9.0
|
||||
$ spack compiler find
|
||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||
gcc@4.9.0
|
||||
|
||||
@@ -76,6 +76,7 @@ or refer to the full manual below.
|
||||
chain
|
||||
extensions
|
||||
pipelines
|
||||
signing
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
@@ -35,27 +35,27 @@ showing lots of installed packages:
|
||||
$ module avail
|
||||
|
||||
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
|
||||
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
|
||||
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
|
||||
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
|
||||
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
|
||||
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
|
||||
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
|
||||
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
|
||||
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
|
||||
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
|
||||
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
|
||||
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
|
||||
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
|
||||
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
|
||||
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
|
||||
autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
|
||||
automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
|
||||
binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
|
||||
bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
|
||||
bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
|
||||
bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
|
||||
cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
|
||||
curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
|
||||
expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
|
||||
flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
|
||||
gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
|
||||
gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
|
||||
gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
|
||||
help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
|
||||
|
||||
The names should look familiar, as they resemble the output from ``spack find``.
|
||||
For example, you could type the following command to load the ``cmake`` module:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load cmake-3.7.2-gcc-6.3.0-fowuuby
|
||||
$ module load cmake/3.7.2-gcc-6.3.0-fowuuby
|
||||
|
||||
Neither of these is particularly pretty, easy to remember, or easy to
|
||||
type. Luckily, Spack offers many facilities for customizing the module
|
||||
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:
|
||||
|
||||
$ spack module tcl loads --dependencies py-numpy git
|
||||
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
||||
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
||||
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
||||
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
||||
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
# readline@6.3%gcc@4.9.3=linux-x86_64
|
||||
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
||||
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
||||
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
||||
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
||||
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
||||
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
||||
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
||||
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
||||
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
||||
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
||||
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
|
||||
The script may be further edited by removing unnecessary modules.
|
||||
|
||||
@@ -826,12 +826,12 @@ For example, consider the following on one system:
|
||||
.. code-block:: console
|
||||
|
||||
$ module avail
|
||||
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module tcl loads antlr # WRONG!
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
module load antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
@@ -3071,7 +3071,7 @@ follows:
|
||||
# The library provided by the bar virtual package
|
||||
@property
|
||||
def bar_libs(self):
|
||||
return find_libraries("libFooBar", root=sef.home, recursive=True)
|
||||
return find_libraries("libFooBar", root=self.home, recursive=True)
|
||||
|
||||
# The baz virtual package home
|
||||
@property
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
# These dependencies should be installed using pip in order
|
||||
# to build the documentation.
|
||||
|
||||
sphinx>=3.4,!=4.1.2,!=5.1.0
|
||||
sphinxcontrib-programoutput
|
||||
sphinx-design
|
||||
sphinx-rtd-theme
|
||||
python-levenshtein
|
||||
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
|
||||
# https://stackoverflow.com/questions/67542699
|
||||
docutils <0.17
|
||||
pygments <2.13
|
||||
urllib3 <2
|
||||
sphinx==6.2.1
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_design==0.4.1
|
||||
sphinx-rtd-theme==1.2.2
|
||||
python-levenshtein==0.21.1
|
||||
docutils==0.18.1
|
||||
pygments==2.15.1
|
||||
urllib3==2.0.3
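These pins are meant to be installed with pip before building the documentation.
A typical invocation (assuming the current directory is the Spack docs directory
containing this file, and that the standard Sphinx Makefile is present) is:

    $ pip install -r requirements.txt
    $ make html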
|
||||
|
||||
lib/spack/docs/signing.rst (new file, 484 lines)
@@ -0,0 +1,484 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _signing:
|
||||
|
||||
=====================
|
||||
Spack Package Signing
|
||||
=====================
|
||||
|
||||
The goal of package signing in Spack is to provide data integrity
|
||||
assurances around official packages produced by the automated Spack CI
|
||||
pipelines. These assurances directly address the security of Spack’s
|
||||
software supply chain by explaining why a security-conscious user can
|
||||
be reasonably justified in the belief that packages installed via Spack
|
||||
have an uninterrupted auditable trail back to change management
|
||||
decisions judged to be appropriate by the Spack maintainers. This is
|
||||
achieved through cryptographic signing of packages built by Spack CI
|
||||
pipelines based on code that has been transparently reviewed and
|
||||
approved on GitHub. This document describes the signing process for
|
||||
interested users.
|
||||
|
||||
.. _risks:
|
||||
|
||||
------------------------------
|
||||
Risks, Impact and Threat Model
|
||||
------------------------------
|
||||
|
||||
This document addresses the approach taken to safeguard Spack’s
|
||||
reputation with regard to the integrity of the package data produced by
|
||||
Spack’s CI pipelines. It does not address issues of data confidentiality
|
||||
(Spack is intended to be largely open source) or availability (efforts
|
||||
are described elsewhere). With that said, the main reputational risk can
|
||||
be broadly categorized as a loss of faith in the data integrity due to a
|
||||
breach of the private key used to sign packages. Remediation of a
|
||||
private key breach would require republishing the public key with a
|
||||
revocation certificate, generating a new signing key, an assessment and
|
||||
potential rebuild/resigning of all packages since the key was breached,
|
||||
and finally direct intervention by every spack user to update their copy
|
||||
of Spack’s public keys used for local verification.
|
||||
|
||||
The primary threat model used in mitigating the risks of these stated
|
||||
impacts is one of individual error, not malicious intent or insider
|
||||
threat. The primary objective is to avoid the above impacts by making a
|
||||
private key breach nearly impossible due to oversight or configuration
|
||||
error. Obvious and straightforward measures are taken to mitigate issues
|
||||
of malicious interference in data integrity and insider threats but
|
||||
these attack vectors are not systematically addressed. It should be hard
|
||||
to exfiltrate the private key intentionally, and almost impossible to
|
||||
leak the key by accident.
|
||||
|
||||
.. _overview:
|
||||
|
||||
-----------------
|
||||
Pipeline Overview
|
||||
-----------------
|
||||
|
||||
Spack pipelines build software through progressive stages where packages
|
||||
in later stages nominally depend on packages built in earlier stages.
|
||||
For both technical and design reasons these dependencies are not
|
||||
implemented through the default GitLab artifacts mechanism; instead
|
||||
built packages are uploaded to AWS S3 mirrors (buckets) where they are
|
||||
retrieved by subsequent stages in the pipeline. Two broad categories of
|
||||
pipelines exist: Pull Request (PR) pipelines and Develop/Release
|
||||
pipelines.
|
||||
|
||||
- PR pipelines are launched in response to pull requests made by
|
||||
trusted and untrusted users. Packages built on these pipelines upload
|
||||
code to quarantined AWS S3 locations which cache the built packages
|
||||
for the purposes of review and iteration on the changes proposed in
|
||||
the pull request. Packages built on PR pipelines can come from
|
||||
untrusted users so signing of these pipelines is not implemented.
|
||||
Jobs in these pipelines are executed via normal GitLab runners both
|
||||
within the AWS GitLab infrastructure and at affiliated institutions.
|
||||
- Develop and Release pipelines **sign** the packages they produce and carry
|
||||
strong integrity assurances that trace back to auditable change management
|
||||
decisions. These pipelines only run after members from a trusted group of
|
||||
reviewers verify that the proposed changes in a pull request are appropriate.
|
||||
Once the PR is merged, or a release is cut, a pipeline is run on protected
|
||||
GitLab runners which provide access to the required signing keys within the
|
||||
job. Intermediary keys are used to sign packages in each stage of the
|
||||
pipeline as they are built and a final job officially signs each package
|
||||
external to any specific packages’ build environment. An intermediate key
|
||||
exists in the AWS infrastructure and for each affiliated institution that
|
||||
maintains protected runners. The runners that execute these pipelines
|
||||
exclusively accept jobs from protected branches meaning the intermediate keys
|
||||
are never exposed to unreviewed code and the official keys are never exposed
|
||||
to any specific build environment.
|
||||
|
||||
.. _key_architecture:
|
||||
|
||||
----------------
|
||||
Key Architecture
|
||||
----------------
|
||||
|
||||
Spack’s CI process uses public-key infrastructure (PKI) based on GNU Privacy
|
||||
Guard (gpg) keypairs to sign public releases of spack package metadata, also
|
||||
called specs. Two classes of GPG keys are involved in the process to reduce the
|
||||
impact of an individual private key compromise; these key classes are the
|
||||
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
|
||||
sub-keys that are used exclusively for signing packages. This can be confusing
|
||||
so for the purpose of this explanation we’ll refer to Root and Signing keys.
|
||||
Each key has a private and a public component as well as one or more identities
|
||||
and zero or more signatures.
|
||||
|
||||
-------------------
|
||||
Intermediate CI Key
|
||||
-------------------
|
||||
|
||||
The Intermediate key class is used to sign and verify packages between stages
|
||||
within a develop or release pipeline. An intermediate key exists for the AWS
|
||||
infrastructure as well as each affiliated institution that maintains protected
|
||||
runners. These intermediate keys are made available to the GitLab execution
|
||||
environment building the package so that the package’s dependencies may be
|
||||
verified by the Signing Intermediate CI Public Key and the final package may be
|
||||
signed by the Signing Intermediate CI Private Key.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Intermediate CI Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: None |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
|
||||
The *Root Intermediate CI Private Key* is stripped out of the GPG key and
|
||||
stored offline completely separate from Spack’s infrastructure. This allows the
|
||||
core development team to append revocation certificates to the GPG key and
|
||||
issue new sub-keys for use in the pipeline. It is our expectation that this
|
||||
will happen on a semi-regular basis. A corollary of this is that *this key
|
||||
should not be used to verify package integrity outside the internal CI process.*
|
||||
|
||||
----------------
|
||||
Reputational Key
|
||||
----------------
|
||||
|
||||
The Reputational Key is the public facing key used to sign complete groups of
|
||||
development and release packages. Only one key pair exists in this class of
|
||||
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
|
||||
used to verify package integrity. At the end of a develop or release pipeline, a
|
||||
final pipeline job pulls down all signed package metadata built by the pipeline,
|
||||
verifies they were signed with an Intermediate CI Key, then strips the
|
||||
Intermediate CI Key signature from the package and re-signs them with the
|
||||
Signing Reputational Private Key. The officially signed packages are then
|
||||
uploaded back to the AWS S3 mirror. Please note that separating use of the
|
||||
reputational key into this final job is done to prevent leakage of the key in a
|
||||
spack package. Because the Signing Reputational Private Key is never exposed to
|
||||
a build job it cannot accidentally end up in any built package.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Reputational Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Spack Project <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: Signed by core development team [#f1]_ |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
The Root Reputational Private Key is stripped out of the GPG key and stored
|
||||
offline completely separate from Spack’s infrastructure. This allows the core
|
||||
development team to append revocation certificates to the GPG key in the
|
||||
unlikely event that the Signing Reputational Private Key is compromised. In
|
||||
general it is the expectation that rotating this key will happen infrequently if
|
||||
at all. This should allow relatively transparent verification for the end-user
|
||||
community without needing deep familiarity with GnuPG or Public Key
|
||||
Infrastructure.
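As a rough sketch of what this looks like for an end user (the mirror URL below
is the public Spack binary mirror and is an assumption about which mirror is in
use), trusting the published public keys is typically all that is required:

.. code-block:: console

   $ spack mirror add spack-public https://binaries.spack.io/develop
   $ spack buildcache keys --install --trust

Once the keys are trusted, binaries pulled from that mirror are verified against
the Reputational Public Key at install time.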
|
||||
|
||||
|
||||
.. _build_cache_format:
|
||||
|
||||
------------------
|
||||
Build Cache Format
|
||||
------------------
|
||||
|
||||
A binary package consists of a metadata file unambiguously defining the
|
||||
built package (and including other details such as how to relocate it)
|
||||
and the installation directory of the package stored as a compressed
|
||||
archive file. The metadata files can either be unsigned, in which case
|
||||
the contents are simply the json-serialized concrete spec plus metadata,
|
||||
or they can be signed, in which case the json-serialized concrete spec
|
||||
plus metadata is wrapped in a gpg cleartext signature. Built package
|
||||
metadata files are named to indicate the operating system and
|
||||
architecture for which the package was built as well as the compiler
|
||||
used to build it and the package’s name and version. For example::
|
||||
|
||||
linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
|
||||
|
||||
would contain the concrete spec and binary metadata for a binary package
|
||||
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
|
||||
architecture. The id of the built package exists in the name of the file
|
||||
as well (after the package name and version) and in this case begins
|
||||
with ``llv2ys``. The id distinguishes a particular built package from all
|
||||
other built packages with the same os/arch, compiler, name, and version.
|
||||
Below is an example of a signed binary package metadata file. Such a
|
||||
file would live in the ``build_cache`` directory of a binary mirror::
|
||||
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec": {
|
||||
<concrete-spec-contents-omitted>
|
||||
},
|
||||
|
||||
"buildcache_layout_version": 1,
|
||||
"binary_cache_checksum": {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
|
||||
},
|
||||
|
||||
"buildinfo": {
|
||||
"relative_prefix":
|
||||
"linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
|
||||
"relative_rpaths": false
|
||||
}
|
||||
}
|
||||
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
|
||||
ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
|
||||
4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
|
||||
QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
|
||||
887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
|
||||
4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
|
||||
qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
|
||||
QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
|
||||
Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
|
||||
j3DXty57
|
||||
=3gvm
|
||||
-----END PGP SIGNATURE-----
|
||||
|
||||
If a user has trusted the public key associated with the private key
|
||||
used to sign the above spec file, the signature can be verified with
|
||||
gpg, as follows::
|
||||
|
||||
$ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
|
||||
|
||||
The metadata (regardless of whether it is signed or unsigned) contains the checksum
|
||||
of the ``.spack`` file containing the actual installation. The checksum should
|
||||
be compared to a checksum computed locally on the ``.spack`` file to ensure the
|
||||
contents have not changed since the binary spec plus metadata were signed. The
|
||||
``.spack`` files are actually tarballs containing the compressed archive of the
|
||||
install tree. These files, along with the metadata files, live within the
|
||||
``build_cache`` directory of the mirror, and together are organized as follows::
|
||||
|
||||
build_cache/
|
||||
# unsigned metadata (for indexing, contains sha256 of .spack file)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
|
||||
# clearsigned metadata (same as above, but signed)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
|
||||
<arch>/
|
||||
<compiler>/
|
||||
<name>-<ver>/
|
||||
# tar.gz-compressed prefix (may support more compression formats later)
|
||||
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
|
||||
|
||||
Uncompressing and extracting the ``.spack`` file results in the install tree.
|
||||
This is in contrast to previous versions of spack, where the ``.spack`` file
|
||||
contained a (duplicated) metadata file, a signature file and a nested tarball
|
||||
containing the install tree.
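To make the checksum comparison described above concrete, here is a minimal
sketch using the example ``zlib`` file names from this section (the hash printed
by ``sha256sum`` should match the ``hash`` field of ``binary_cache_checksum``):

.. code-block:: console

   $ sha256sum linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack
   $ grep -A 3 '"binary_cache_checksum"' \
         linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig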
|
||||
|
||||
.. _internal_implementation:
|
||||
|
||||
-----------------------
|
||||
Internal Implementation
|
||||
-----------------------
|
||||
|
||||
The technical implementation of the pipeline signing process includes components
|
||||
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
|
||||
institutions, and the GitLab/GitLab Runner deployment. We present the technical
|
||||
implementation in two interdependent sections. The first addresses how secrets
|
||||
are managed through the lifecycle of a develop or release pipeline. The second
|
||||
section describes how Gitlab Runner and pipelines are configured and managed to
|
||||
support secure automated signing.
|
||||
|
||||
Secrets Management
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
As stated above the Root Private Keys (intermediate and reputational)
|
||||
are stripped from the GPG keys and stored outside Spack’s
|
||||
infrastructure.
|
||||
|
||||
.. warning::
|
||||
**TODO**
|
||||
- Explanation here about where and how access is handled for these keys.
|
||||
- Both Root private keys are protected with strong passwords
|
||||
- Who has access to these and how?
|
||||
|
||||
**Intermediate CI Key**
|
||||
-----------------------
|
||||
|
||||
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
|
||||
run in AWS, and one key for each affiliated institution (e.g. University of
|
||||
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
|
||||
|
||||
The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
|
||||
exported as an ASCII armored file and stored in a Kubernetes secret called
|
||||
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
|
||||
contains an ASCII-armored export of just the *public* components of the
|
||||
Reputational Key. This secret also contains the *public* components of each of
|
||||
the affiliated institutions' Intermediate CI Key. These are potentially needed
|
||||
to verify dependent packages which may have been found in the public mirror or
|
||||
built by a protected job running on an affiliated institution's infrastructure
|
||||
in an earlier stage of the pipeline.
|
||||
|
||||
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
|
||||
the following way:
|
||||
|
||||
1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
|
||||
a job tagged ``protected`` from a protected GitLab branch. (See
|
||||
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||
2. Based on its configuration, the runner creates a job Pod in the
|
||||
pipeline namespace and mounts the spack-intermediate-ci-signing-key
|
||||
Kubernetes secret into the build container
|
||||
3. The Intermediate CI Key, affiliated institutions' public key and the
|
||||
Reputational Public Key are imported into a keyring by the ``spack gpg …``
|
||||
sub-command. This is initiated by the job’s build script which is created by
|
||||
the generate job at the beginning of the pipeline (a sketch of this import is shown after this list).
|
||||
4. Assuming the package has dependencies those specs are verified using
|
||||
the keyring.
|
||||
5. The package is built and the spec.json is generated
|
||||
6. The spec.json is signed by the keyring and uploaded to the mirror’s
|
||||
build cache.
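A minimal sketch of the key import in step 3 (the mount path and file names are
assumptions; the real ones come from the ``spack-intermediate-ci-signing-key``
secret mounted by the runner):

.. code-block:: console

   $ spack gpg trust /mnt/keys/intermediate_ci_signing_key.gpg
   $ spack gpg trust /mnt/keys/reputational_public_key.gpg
   $ spack gpg list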
|
||||
|
||||
**Reputational Key**
|
||||
--------------------
|
||||
|
||||
Because of the increased impact to end users in the case of a private
|
||||
key breach, the Reputational Key is managed separately from the
|
||||
Intermediate CI Keys and has additional controls. First, the Reputational
|
||||
Key was generated outside of Spack’s infrastructure and has been signed
|
||||
by the core development team. The Reputational Key (along with the
|
||||
Signing Reputational Private Key) was then ASCII armor exported to a
|
||||
file. Unlike the Intermediate CI Key this exported file is not stored as
|
||||
a base64-encoded secret in Kubernetes. Instead, *the key file
|
||||
itself* is encrypted and stored in Kubernetes as the
|
||||
``spack-signing-key-encrypted`` secret in the pipeline namespace.
|
||||
|
||||
The encryption of the exported Reputational Key (including the Signing
|
||||
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
|
||||
keys
|
||||
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
|
||||
The private key material is decrypted and imported at the time of signing into a
|
||||
memory mounted temporary directory holding the keychain. The signing job uses
|
||||
the `AWS Encryption SDK
|
||||
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
|
||||
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
|
||||
decrypt the key is granted to the job Pod through a Kubernetes service account
|
||||
specifically used for this, and only this, function. Finally, for convenience's
|
||||
sake, this same secret contains an ASCII-armored export of the *public*
|
||||
components of the Intermediate CI Keys and the Reputational Key. This allows the
|
||||
signing script to verify that packages were built by the pipeline (both on AWS
|
||||
or at affiliated institutions), or signed previously as a part of a different
|
||||
pipeline. This is done *before* decrypting and importing the
|
||||
Signing Reputational Private Key material and officially signing the packages.
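Conceptually, the decrypt-and-import step looks like the following sketch (the
paths, key file name, and exact ``aws-encryption-cli`` options are assumptions;
the authoritative commands live in the ``sign.sh`` script described below):

.. code-block:: console

   $ export GNUPGHOME=/tmp/keyring          # tmpfs-backed keyring directory
   $ aws-encryption-cli --decrypt \
         --input /mnt/keys/signing_key.encrypted.kms \
         --output - | gpg --import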
|
||||
|
||||
Procedurally, the ``spack-signing-key-encrypted`` secret is used in the
|
||||
following way:
|
||||
|
||||
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
|
||||
up a job tagged ``notary`` from a protected GitLab branch (See
|
||||
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||
2. Based on its configuration, the runner creates a job pod in the
|
||||
pipeline namespace. The job is run in a stripped-down, purpose-built
|
||||
Docker image, ``ghcr.io/spack/notary:latest``. The runner is
|
||||
configured to only allow running jobs with this image.
|
||||
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
|
||||
a path on disk. Note that this becomes several files on disk, the
|
||||
public components of the Intermediate CI Keys, the public components
|
||||
of the Reputational Key, and an AWS KMS encrypted file containing the
|
||||
Signing Reputational Private Key.
|
||||
4. In addition to the secret, the runner creates a tmpfs memory mounted
|
||||
directory where the GnuPG keyring will be created to verify, and
|
||||
then resign the package specs.
|
||||
5. The job script syncs all spec.json.sig files from the build cache to
|
||||
a working directory in the job’s execution environment.
|
||||
6. The job script then runs the ``sign.sh`` script built into the
|
||||
notary Docker image.
|
||||
7. The ``sign.sh`` script imports the public components of the
|
||||
Reputational and Intermediate CI Keys and uses them to verify good
|
||||
signatures on the spec.json.sig files. If any signed spec does not
|
||||
verify the job immediately fails.
|
||||
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
|
||||
the spec json data from the signed file in preparation for being
|
||||
re-signed with the Reputational Key.
|
||||
9. The private components of the Reputational Key are decrypted to
|
||||
standard out using ``aws-encryption-cli`` directly into a ``gpg
|
||||
--import …`` statement, which imports the key into the
|
||||
keyring mounted in-memory.
|
||||
10. The private key is then used to sign each of the json specs and the
|
||||
keyring is removed from disk.
|
||||
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
|
||||
public signing of the packages for the develop or release pipeline
|
||||
that created them is complete.
|
||||
|
||||
Non service-account access to the private components of the Reputational
|
||||
Key is managed through access to the symmetric secret in KMS used
|
||||
to encrypt the data key (which in turn is used to encrypt the GnuPG key
|
||||
- see the `Encryption SDK
|
||||
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
|
||||
A small trusted subset of the core development team are the only
|
||||
individuals with access to this symmetric key.
|
||||
|
||||
.. _protected_runners:
|
||||
|
||||
Protected Runners and Reserved Tags
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack has a large number of Gitlab Runners operating in its build farm.
|
||||
These include runners deployed in the AWS Kubernetes cluster as well as
|
||||
runners deployed at affiliated institutions. The majority of runners are
|
||||
shared runners that operate across projects in gitlab.spack.io. These
|
||||
runners pick up jobs primarily from the spack/spack project and execute
|
||||
them in PR pipelines.
|
||||
|
||||
A small number of runners operating on AWS and at affiliated institutions are
|
||||
registered as specific *protected* runners on the spack/spack project. In
|
||||
addition to protected runners there are protected branches on the spack/spack
|
||||
project. These are the ``develop`` branch, any release branch (i.e. managed with
|
||||
the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
|
||||
wildcard). Finally, Spack’s pipeline generation code reserves certain tags to make
|
||||
sure jobs are routed to the correct runners, these tags are ``public``,
|
||||
``protected``, and ``notary``. Understanding how all this works together to
|
||||
protect secrets and provide integrity assurances can be a little confusing so
|
||||
let’s break these down:
|
||||
|
||||
- **Protected Branches** - Protected branches in Spack prevent anyone
|
||||
other than Maintainers in GitLab from pushing code. In the case of
|
||||
Spack the only Maintainer level entity pushing code to protected
|
||||
branches is Spack bot. Protecting branches also marks them in such a
|
||||
way that Protected Runners will only run jobs from those branches.
|
||||
- **Protected Runners** - Protected Runners only run jobs from protected
|
||||
branches. Because protected runners have access to secrets, it's critical
|
||||
that they not run Jobs from untrusted code (i.e. PR branches). If they did it
|
||||
would be possible for a PR branch to tag a job in such a way that a protected
|
||||
runner executed that job and mounted secrets into a code execution
|
||||
environment that had not been reviewed by Spack maintainers. Note however
|
||||
that in the absence of tagging used to route jobs, public runners *could* run
|
||||
jobs from protected branches. No secrets would be at risk of being breached
|
||||
because non-protected runners do not have access to those secrets; lack of
|
||||
secrets would, however, cause the jobs to fail.
|
||||
- **Reserved Tags** - To mitigate the issue of public runners picking up
|
||||
protected jobs Spack uses a small set of “reserved” job tags (Note that these
|
||||
are *job* tags, not git tags). These tags are “public”, “protected”, and
|
||||
“notary.” The majority of jobs executed in Spack’s GitLab instance are
|
||||
executed via a ``generate`` job. The generate job code systematically ensures
|
||||
that no user defined configuration sets these tags. Instead, the ``generate``
|
||||
job sets these tags based on rules related to the branch where this pipeline
|
||||
originated. If the job is a part of a pipeline on a PR branch it sets the
|
||||
``public`` tag. If the job is part of a pipeline on a protected branch it
|
||||
sets the ``protected`` tag. Finally if the job is the package signing job and
|
||||
it is running on a pipeline that is part of a protected branch then it sets
|
||||
the ``notary`` tag.
|
||||
|
||||
Protected Runners are configured to only run jobs from protected branches. Only
|
||||
jobs running in pipelines on protected branches are tagged with ``protected`` or
|
||||
``notary`` tags. This tightly couples jobs on protected branches to protected
|
||||
runners that provide access to the secrets required to sign the built packages.
|
||||
The secrets can **only** be accessed via:
|
||||
|
||||
1. Runners under direct control of the core development team.
|
||||
2. Runners under direct control of trusted maintainers at affiliated institutions.
|
||||
3. By code running the automated pipeline that has been reviewed by the
|
||||
Spack maintainers and judged to be appropriate.
|
||||
|
||||
Other attempts (either through malicious intent or incompetence) can at
|
||||
worst grab jobs intended for protected runners, which will cause those
|
||||
jobs to fail, alerting both Spack maintainers and the core development
|
||||
team.
|
||||
|
||||
.. [#f1]
|
||||
The Reputational Key has also cross signed core development team
|
||||
keys.
|
||||
lib/spack/env/cc (vendored, 428 changed lines)
@@ -416,30 +416,14 @@ input_command="$*"
|
||||
# The lists are all bell-separated to be as flexible as possible, as their
|
||||
# contents may come from the command line, from ' '-separated lists,
|
||||
# ':'-separated lists, etc.
|
||||
include_dirs_list=""
|
||||
lib_dirs_list=""
|
||||
rpath_dirs_list=""
|
||||
system_include_dirs_list=""
|
||||
system_lib_dirs_list=""
|
||||
system_rpath_dirs_list=""
|
||||
isystem_system_include_dirs_list=""
|
||||
isystem_include_dirs_list=""
|
||||
libs_list=""
|
||||
other_args_list=""
|
||||
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
|
||||
# Same, but for -Xlinker -rpath -Xlinker /path
|
||||
xlinker_expect_rpath=no
|
||||
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
if system_dir "$1"; then
|
||||
append system_rpath_dirs_list "$1"
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append rpath_dirs_list "$1"
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
@@ -449,9 +433,9 @@ parse_Wl() {
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append system_rpath_dirs_list "$arg"
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append rpath_dirs_list "$arg"
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
--rpath=*)
|
||||
@@ -459,9 +443,9 @@ parse_Wl() {
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append system_rpath_dirs_list "$arg"
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append rpath_dirs_list "$arg"
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
@@ -475,7 +459,7 @@ parse_Wl() {
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
append other_args_list "-Wl,$1"
|
||||
append return_other_args_list "-Wl,$1"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
@@ -483,177 +467,210 @@ parse_Wl() {
|
||||
done
|
||||
}
|
||||
|
||||
categorize_arguments() {
|
||||
|
||||
while [ $# -ne 0 ]; do
|
||||
unset IFS
|
||||
|
||||
# an RPATH to be added after the case statement.
|
||||
rp=""
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
# Multiple consecutive spaces in the command line can
|
||||
# result in blank arguments
|
||||
if [ -z "$1" ]; then
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
|
||||
# NOTE: the eval is required to allow `|` alternatives inside the variable
|
||||
eval "\
|
||||
case \"\$1\" in
|
||||
$SPACK_COMPILER_FLAGS_KEEP)
|
||||
append other_args_list \"\$1\"
|
||||
# Same, but for -Xlinker -rpath -Xlinker /path
|
||||
xlinker_expect_rpath=no
|
||||
|
||||
while [ $# -ne 0 ]; do
|
||||
|
||||
# an RPATH to be added after the case statement.
|
||||
rp=""
|
||||
|
||||
# Multiple consecutive spaces in the command line can
|
||||
# result in blank arguments
|
||||
if [ -z "$1" ]; then
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
|
||||
# NOTE: the eval is required to allow `|` alternatives inside the variable
|
||||
eval "\
|
||||
case \"\$1\" in
|
||||
$SPACK_COMPILER_FLAGS_KEEP)
|
||||
append return_other_args_list \"\$1\"
|
||||
shift
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
"
|
||||
fi
|
||||
# the replace list is a space-separated list of pipe-separated pairs,
|
||||
# the first in each pair is the original prefix to be matched, the
|
||||
# second is the replacement prefix
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
|
||||
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
|
||||
before=${rep%|*}
|
||||
after=${rep#*|}
|
||||
eval "\
|
||||
stripped=\"\${1##$before}\"
|
||||
"
|
||||
if [ "$stripped" = "$1" ] ; then
|
||||
continue
|
||||
fi
|
||||
|
||||
replaced="$after$stripped"
|
||||
|
||||
# it matched, remove it
|
||||
shift
|
||||
continue
|
||||
|
||||
if [ -z "$replaced" ] ; then
|
||||
# completely removed, continue OUTER loop
|
||||
continue 2
|
||||
fi
|
||||
|
||||
# re-build argument list with replacement
|
||||
set -- "$replaced" "$@"
|
||||
done
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
-isystem*)
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_isystem_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_isystem_include_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_include_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_lib_dirs_list "$arg"
|
||||
else
|
||||
append return_lib_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
# and passed by ifx to the linker, which confuses it with a
|
||||
# library. Filter it out.
|
||||
# TODO: generalize filtering of args with an env var, so that
|
||||
# TODO: we do not have to special case this here.
|
||||
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
|
||||
&& [ "$1" != "${1#-loopopt}" ]; then
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
arg="${1#-l}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append return_other_args_list "-l$arg"
|
||||
;;
|
||||
-Wl,*)
|
||||
IFS=,
|
||||
if ! parse_Wl ${1#-Wl,}; then
|
||||
append return_other_args_list "$1"
|
||||
fi
|
||||
unset IFS
|
||||
;;
|
||||
-Xlinker)
|
||||
shift
|
||||
if [ $# -eq 0 ]; then
|
||||
# -Xlinker without value: let the compiler error about it.
|
||||
append return_other_args_list -Xlinker
|
||||
xlinker_expect_rpath=no
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
if system_dir "$1"; then
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list -Xlinker
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
"
|
||||
fi
|
||||
# the replace list is a space-separated list of pipe-separated pairs,
|
||||
# the first in each pair is the original prefix to be matched, the
|
||||
# second is the replacement prefix
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
|
||||
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
|
||||
before=${rep%|*}
|
||||
after=${rep#*|}
|
||||
eval "\
|
||||
stripped=\"\${1##$before}\"
|
||||
"
|
||||
if [ "$stripped" = "$1" ] ; then
|
||||
continue
|
||||
fi
|
||||
shift
|
||||
done
|
||||
|
||||
replaced="$after$stripped"
|
||||
|
||||
# it matched, remove it
|
||||
shift
|
||||
|
||||
if [ -z "$replaced" ] ; then
|
||||
# completely removed, continue OUTER loop
|
||||
continue 2
|
||||
fi
|
||||
|
||||
# re-build argument list with replacement
|
||||
set -- "$replaced" "$@"
|
||||
done
|
||||
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
|
||||
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
|
||||
# parsing.
|
||||
if [ "$xlinker_expect_rpath" = yes ]; then
|
||||
append return_other_args_list -Xlinker
|
||||
append return_other_args_list -rpath
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
-isystem*)
|
||||
arg="${1#-isystem}"
|
||||
isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append isystem_system_include_dirs_list "$arg"
|
||||
else
|
||||
append isystem_include_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append system_include_dirs_list "$arg"
|
||||
else
|
||||
append include_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append system_lib_dirs_list "$arg"
|
||||
else
|
||||
append lib_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
# and passed by ifx to the linker, which confuses it with a
|
||||
# library. Filter it out.
|
||||
# TODO: generalize filtering of args with an env var, so that
|
||||
# TODO: we do not have to special case this here.
|
||||
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
|
||||
&& [ "$1" != "${1#-loopopt}" ]; then
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
arg="${1#-l}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append other_args_list "-l$arg"
|
||||
;;
|
||||
-Wl,*)
|
||||
IFS=,
|
||||
if ! parse_Wl ${1#-Wl,}; then
|
||||
append other_args_list "$1"
|
||||
fi
|
||||
unset IFS
|
||||
;;
|
||||
-Xlinker)
|
||||
shift
|
||||
if [ $# -eq 0 ]; then
|
||||
# -Xlinker without value: let the compiler error about it.
|
||||
append other_args_list -Xlinker
|
||||
xlinker_expect_rpath=no
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
if system_dir "$1"; then
|
||||
append system_rpath_dirs_list "$1"
|
||||
else
|
||||
append rpath_dirs_list "$1"
|
||||
fi
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append rpath_dirs_list "$arg"
|
||||
fi
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append other_args_list -Xlinker
|
||||
append other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
# Same, but for -Wl flags.
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
append return_other_args_list -Wl,-rpath
|
||||
fi
|
||||
}
|
||||
|
||||
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
|
||||
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
|
||||
# parsing.
|
||||
if [ "$xlinker_expect_rpath" = yes ]; then
|
||||
append other_args_list -Xlinker
|
||||
append other_args_list -rpath
|
||||
fi
|
||||
|
||||
# Same, but for -Wl flags.
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
append other_args_list -Wl,-rpath
|
||||
fi
|
||||
categorize_arguments "$@"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
other_args_list="$return_other_args_list"
|
||||
|
||||
#
|
||||
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
|
||||
@@ -673,12 +690,14 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
|
||||
extend flags_list SPACK_DEBUG_FLAGS
|
||||
fi
|
||||
|
||||
spack_flags_list=""
|
||||
|
||||
# Fortran flags come before CPPFLAGS
|
||||
case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
F)
|
||||
extend flags_list SPACK_FFLAGS
|
||||
extend spack_flags_list SPACK_FFLAGS
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
@@ -687,7 +706,7 @@ esac
|
||||
# C preprocessor flags come before any C/CXX flags
|
||||
case "$mode" in
|
||||
cpp|as|cc|ccld)
|
||||
extend flags_list SPACK_CPPFLAGS
|
||||
extend spack_flags_list SPACK_CPPFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
@@ -697,10 +716,10 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
C)
|
||||
extend flags_list SPACK_CFLAGS
|
||||
extend spack_flags_list SPACK_CFLAGS
|
||||
;;
|
||||
CXX)
|
||||
extend flags_list SPACK_CXXFLAGS
|
||||
extend spack_flags_list SPACK_CXXFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
@@ -712,10 +731,25 @@ esac
|
||||
# Linker flags
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
extend flags_list SPACK_LDFLAGS
|
||||
extend spack_flags_list SPACK_LDFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
|
||||
# On macOS insert headerpad_max_install_names linker flag
|
||||
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
|
||||
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
|
||||
@@ -741,6 +775,8 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
extend lib_dirs_list SPACK_LINK_DIRS
|
||||
fi
|
||||
|
||||
libs_list=""
|
||||
|
||||
# add RPATHs if we're in in any linking mode
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
@@ -769,12 +805,16 @@ args_list="$flags_list"
|
||||
|
||||
# Insert include directories just prior to any system include directories
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_flags_include_dirs_list "-I"
|
||||
extend args_list include_dirs_list "-I"
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$isystem_was_used" = "true" ]; then
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
elif [ "$isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
else
|
||||
extend args_list SPACK_INCLUDE_DIRS "-I"
|
||||
@@ -782,11 +822,15 @@ case "$mode" in
|
||||
;;
|
||||
esac
|
||||
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
# RPATHs arguments
|
||||
@@ -795,20 +839,25 @@ case "$mode" in
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
ld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Other arguments from the input command
|
||||
extend args_list other_args_list
|
||||
extend args_list spack_flags_other_args_list
|
||||
|
||||
# Inject SPACK_LDLIBS, if supplied
|
||||
extend args_list libs_list "-l"
|
||||
@@ -864,3 +913,4 @@ fi
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
# to the alarm bell separator.
|
||||
IFS="$lsep"; exec $full_command_list
|
||||
|
||||
|
||||
lib/spack/external/__init__.py (vendored, 2 changed lines)
@@ -18,7 +18,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit 4b1f21802a23b536bbcce73d3c631a566b20e8bd)
+* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)

 astunparse
 ----------------

@@ -2803,7 +2803,7 @@
 "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
 },
 {
-"versions": "10.2",
+"versions": "10.2:10.2.99",
 "flags" : "-mcpu=zeus"
 },
 {

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.20.0.dev0"
+__version__ = "0.21.0.dev0"
 spack_version = __version__

@@ -760,13 +760,12 @@ def hashes_to_prefixes(spec):
|
||||
}
|
||||
|
||||
|
||||
def get_buildinfo_dict(spec, rel=False):
|
||||
def get_buildinfo_dict(spec):
|
||||
"""Create metadata for a tarball"""
|
||||
manifest = get_buildfile_manifest(spec)
|
||||
|
||||
return {
|
||||
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
||||
"relative_rpaths": rel,
|
||||
"buildpath": spack.store.layout.root,
|
||||
"spackprefix": spack.paths.prefix,
|
||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
|
||||
@@ -1209,9 +1208,6 @@ class PushOptions(NamedTuple):
|
||||
#: Overwrite existing tarball/metadata files in buildcache
|
||||
force: bool = False
|
||||
|
||||
#: Whether to use relative RPATHs
|
||||
relative: bool = False
|
||||
|
||||
#: Allow absolute paths to package prefixes when creating a tarball
|
||||
allow_root: bool = False
|
||||
|
||||
@@ -1281,41 +1277,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
|
||||
workdir = os.path.join(stage_dir, pkg_dir)
|
||||
|
||||
# TODO: We generally don't want to mutate any files, but when using relative
|
||||
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
|
||||
# For now, we only make a full copy of the spec prefix when in relative mode.
|
||||
|
||||
if options.relative:
|
||||
# tarfile is used because it preserves hardlink etc best.
|
||||
binaries_dir = workdir
|
||||
temp_tarfile_name = tarball_name(spec, ".tar")
|
||||
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
|
||||
tar.add(name="%s" % spec.prefix, arcname=".")
|
||||
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
|
||||
tar.extractall(workdir)
|
||||
os.remove(temp_tarfile_path)
|
||||
else:
|
||||
binaries_dir = spec.prefix
|
||||
binaries_dir = spec.prefix
|
||||
|
||||
# create info for later relocation and create tar
|
||||
buildinfo = get_buildinfo_dict(spec, options.relative)
|
||||
buildinfo = get_buildinfo_dict(spec)
|
||||
|
||||
# optionally make the paths in the binaries relative to each other
|
||||
# in the spack install tree before creating tarball
|
||||
if options.relative:
|
||||
make_package_relative(workdir, spec, buildinfo, options.allow_root)
|
||||
elif not options.allow_root:
|
||||
if not options.allow_root:
|
||||
ensure_package_relocatable(buildinfo, binaries_dir)
|
||||
|
||||
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
|
||||
|
||||
# remove copy of install directory
|
||||
if options.relative:
|
||||
shutil.rmtree(workdir)
|
||||
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
@@ -1336,7 +1308,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
# This will be used to determine is the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
|
||||
buildinfo["relative_rpaths"] = options.relative
|
||||
spec_dict["buildinfo"] = buildinfo
|
||||
|
||||
with open(specfile_path, "w") as outfile:
|
||||
@@ -1596,35 +1567,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
return None
|
||||
|
||||
|
||||
def make_package_relative(workdir, spec, buildinfo, allow_root):
|
||||
"""
|
||||
Change paths in binaries to relative paths. Change absolute symlinks
|
||||
to relative symlinks.
|
||||
"""
|
||||
prefix = spec.prefix
|
||||
old_layout_root = buildinfo["buildpath"]
|
||||
orig_path_names = list()
|
||||
cur_path_names = list()
|
||||
for filename in buildinfo["relocate_binaries"]:
|
||||
orig_path_names.append(os.path.join(prefix, filename))
|
||||
cur_path_names.append(os.path.join(workdir, filename))
|
||||
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
||||
|
||||
if "elf" in platform.binary_formats:
|
||||
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
||||
|
||||
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
|
||||
orig_path_names = list()
|
||||
cur_path_names = list()
|
||||
for linkname in buildinfo.get("relocate_links", []):
|
||||
orig_path_names.append(os.path.join(prefix, linkname))
|
||||
cur_path_names.append(os.path.join(workdir, linkname))
|
||||
relocate.make_link_relative(cur_path_names, orig_path_names)
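The relocation helpers above boil down to rewriting absolute targets as paths relative to the file that references them. A small self-contained illustration with hypothetical paths:

    import os

    link = "/opt/spack/opt/pkg-1.0/bin/tool"         # hypothetical symlink location
    target = "/opt/spack/opt/dep-2.0/lib/libdep.so"  # hypothetical absolute target
    relative = os.path.relpath(target, os.path.dirname(link))
    print(relative)  # ../../dep-2.0/lib/libdep.so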
|
||||
|
||||
|
||||
def ensure_package_relocatable(buildinfo, binaries_dir):
|
||||
"""Check if package binaries are relocatable."""
|
||||
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
|
||||
|
||||
@@ -175,12 +175,12 @@ def black_root_spec() -> str:
|
||||
|
||||
def flake8_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap flake8"""
|
||||
return _root_spec("py-flake8")
|
||||
return _root_spec("py-flake8@3.8.2:")
|
||||
|
||||
|
||||
def pytest_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap flake8"""
|
||||
return _root_spec("py-pytest")
|
||||
return _root_spec("py-pytest@6.2.4:")
|
||||
|
||||
|
||||
def ensure_environment_dependencies() -> None:
|
||||
|
||||
@@ -589,7 +589,6 @@ def set_module_variables_for_package(pkg):
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
m.make = MakeExecutable("make", jobs)
|
||||
m.gmake = MakeExecutable("gmake", jobs)
|
||||
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
# TODO: johnwparent: add package or builder support to define these build tools
|
||||
# for now there is no entrypoint for builders to define these on their
|
||||
@@ -1216,6 +1215,9 @@ def child_fun():
|
||||
return child_result
|
||||
|
||||
|
||||
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
|
||||
|
||||
|
||||
def get_package_context(traceback, context=3):
|
||||
"""Return some context for an error message when the build fails.
|
||||
|
||||
@@ -1244,32 +1246,38 @@ def make_stack(tb, stack=None):
|
||||
|
||||
stack = make_stack(traceback)
|
||||
|
||||
basenames = tuple(base.__name__ for base in CONTEXT_BASES)
|
||||
for tb in stack:
|
||||
frame = tb.tb_frame
|
||||
if "self" in frame.f_locals:
|
||||
# Find the first proper subclass of PackageBase.
|
||||
# Find the first proper subclass of the PackageBase or BaseBuilder, but
|
||||
# don't provide context if the code is actually in the base classes.
|
||||
obj = frame.f_locals["self"]
|
||||
if isinstance(obj, spack.package_base.PackageBase):
|
||||
func = getattr(obj, tb.tb_frame.f_code.co_name, "")
|
||||
if func:
|
||||
typename, *_ = func.__qualname__.partition(".")
|
||||
|
||||
if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
|
||||
break
|
||||
else:
|
||||
return None
|
||||
|
||||
# We found obj, the Package implementation we care about.
|
||||
# Point out the location in the install method where we failed.
|
||||
lines = [
|
||||
"{0}:{1:d}, in {2}:".format(
|
||||
inspect.getfile(frame.f_code),
|
||||
frame.f_lineno - 1, # subtract 1 because f_lineno is 0-indexed
|
||||
frame.f_code.co_name,
|
||||
)
|
||||
]
|
||||
filename = inspect.getfile(frame.f_code)
|
||||
lineno = frame.f_lineno
|
||||
if os.path.basename(filename) == "package.py":
|
||||
# subtract 1 because we inject a magic import at the top of package files.
|
||||
# TODO: get rid of the magic import.
|
||||
lineno -= 1
|
||||
|
||||
lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]
|
||||
|
||||
# Build a message showing context in the install method.
|
||||
sourcelines, start = inspect.getsourcelines(frame)
|
||||
|
||||
# Calculate lineno of the error relative to the start of the function.
|
||||
# Subtract 1 because f_lineno is 0-indexed.
|
||||
fun_lineno = frame.f_lineno - start - 1
|
||||
fun_lineno = lineno - start
|
||||
start_ctx = max(0, fun_lineno - context)
|
||||
sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
|
||||
|
||||
@@ -1365,7 +1373,7 @@ def long_message(self):
|
||||
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
|
||||
if os.path.isfile(test_log):
|
||||
out.write("\nSee test log for details:\n")
|
||||
out.write(" {0}n".format(test_log))
|
||||
out.write(" {0}\n".format(test_log))
|
||||
|
||||
return out.getvalue()
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections.abc
|
||||
import os
|
||||
from typing import Tuple
|
||||
|
||||
@@ -13,21 +14,24 @@
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
|
||||
|
||||
def cmake_cache_path(name, value, comment=""):
|
||||
def cmake_cache_path(name, value, comment="", force=False):
|
||||
"""Generate a string for a cmake cache variable"""
|
||||
return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||
|
||||
|
||||
def cmake_cache_string(name, value, comment=""):
|
||||
def cmake_cache_string(name, value, comment="", force=False):
|
||||
"""Generate a string for a cmake cache variable"""
|
||||
return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
|
||||
|
||||
|
||||
def cmake_cache_option(name, boolean_value, comment=""):
|
||||
def cmake_cache_option(name, boolean_value, comment="", force=False):
|
||||
"""Generate a string for a cmake configuration option"""
|
||||
|
||||
value = "ON" if boolean_value else "OFF"
|
||||
return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
|
||||
force_str = " FORCE" if force else ""
|
||||
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
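These helpers emit set(... CACHE ...) lines for the generated initconfig file; the new force argument appends FORCE so the cached value overrides anything already recorded in CMakeCache.txt. A standalone sketch of the expected output (the function body mirrors the helper above rather than importing it from Spack):

    def cmake_cache_path(name, value, comment="", force=False):
        force_str = " FORCE" if force else ""
        return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)

    print(cmake_cache_path("CMAKE_PREFIX_PATH", "/opt/view"), end="")
    # set(CMAKE_PREFIX_PATH "/opt/view" CACHE PATH "")
    print(cmake_cache_path("CUDAToolkit_ROOT", "/opt/cuda", force=True), end="")
    # set(CUDAToolkit_ROOT "/opt/cuda" CACHE PATH "" FORCE)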
|
||||
|
||||
|
||||
class CachedCMakeBuilder(CMakeBuilder):
|
||||
@@ -63,6 +67,34 @@ def cache_name(self):
|
||||
def cache_path(self):
|
||||
return os.path.join(self.pkg.stage.source_path, self.cache_name)
|
||||
|
||||
# Implement a version of the define_from_variant for Cached packages
|
||||
def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
|
||||
"""Return a Cached CMake field from the given variant's value.
|
||||
See define_from_variant in lib/spack/spack/build_systems/cmake.py package
|
||||
"""
|
||||
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if variant not in self.pkg.variants:
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
|
||||
|
||||
if variant not in self.pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = self.pkg.spec.variants[variant].value
|
||||
field = None
|
||||
if isinstance(value, bool):
|
||||
field = cmake_cache_option(cmake_var, value, comment)
|
||||
else:
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
field = cmake_cache_string(cmake_var, value, comment)
|
||||
|
||||
return field
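The value conversion used above can be exercised in isolation: sequence-valued variants are flattened into a semicolon-separated CMake list, everything else is stringified before being wrapped in a cache entry. A self-contained sketch (the helper name is illustrative, not Spack API):

    import collections.abc

    def variant_value_to_cmake(value):
        # Mirrors the branch above: sequences become a CMake ";"-separated list.
        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
            return ";".join(str(v) for v in value)
        return str(value)

    print(variant_value_to_cmake(("openmp", "cuda")))  # openmp;cuda
    print(variant_value_to_cmake("Release"))           # Release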
|
||||
|
||||
def initconfig_compiler_entries(self):
|
||||
# This will tell cmake to use the Spack compiler wrappers when run
|
||||
# through Spack, but use the underlying compiler when run outside of
|
||||
@@ -130,6 +162,17 @@ def initconfig_compiler_entries(self):
|
||||
libs_string = libs_format_string.format(lang)
|
||||
entries.append(cmake_cache_string(libs_string, libs_flags))
|
||||
|
||||
# Set the generator in the cached config
|
||||
if self.spec.satisfies("generator=make"):
|
||||
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
|
||||
if self.spec.satisfies("generator=ninja"):
|
||||
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
|
||||
entries.append(
|
||||
cmake_cache_string(
|
||||
"CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
|
||||
)
|
||||
)
|
||||
|
||||
return entries
|
||||
|
||||
def initconfig_mpi_entries(self):
|
||||
@@ -195,26 +238,58 @@ def initconfig_hardware_entries(self):
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
]
|
||||
|
||||
# Provide standard CMake arguments for dependent CachedCMakePackages
|
||||
if spec.satisfies("^cuda"):
|
||||
entries.append("#------------------{0}".format("-" * 30))
|
||||
entries.append("# Cuda")
|
||||
entries.append("#------------------{0}\n".format("-" * 30))
|
||||
|
||||
cudatoolkitdir = spec["cuda"].prefix
|
||||
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
|
||||
cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
|
||||
entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
|
||||
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
|
||||
# Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
|
||||
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
|
||||
|
||||
archs = spec.variants["cuda_arch"].value
|
||||
if archs[0] != "none":
|
||||
arch_str = ";".join(archs)
|
||||
entries.append(
|
||||
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
|
||||
)
|
||||
|
||||
if "+rocm" in spec:
|
||||
entries.append("#------------------{0}".format("-" * 30))
|
||||
entries.append("# ROCm")
|
||||
entries.append("#------------------{0}\n".format("-" * 30))
|
||||
|
||||
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
|
||||
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
|
||||
entries.append(
|
||||
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
|
||||
)
|
||||
archs = self.spec.variants["amdgpu_target"].value
|
||||
if archs[0] != "none":
|
||||
arch_str = ";".join(archs)
|
||||
entries.append(
|
||||
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
|
||||
)
|
||||
entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
|
||||
entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
|
||||
|
||||
return entries
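As a concrete reading of the CUDA branch: for a hypothetical spec carrying cuda_arch=80,86, the architecture list is joined with semicolons and written as a STRING cache entry, roughly:

    archs = ["80", "86"]  # hypothetical cuda_arch variant value
    arch_str = ";".join(archs)
    print('set(CMAKE_CUDA_ARCHITECTURES "{0}" CACHE STRING "")'.format(arch_str))
    # set(CMAKE_CUDA_ARCHITECTURES "80;86" CACHE STRING "")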
|
||||
|
||||
def std_initconfig_entries(self):
|
||||
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
|
||||
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
|
||||
return [
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# !!!! This is a generated file, edit at own risk !!!!",
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
|
||||
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
|
||||
]
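The CMAKE_PREFIX_PATH handling above only swaps the platform path separator for the semicolons CMake expects, for example:

    import os

    cmake_prefix_path_env = "/opt/deps/zlib:/opt/deps/hdf5"  # hypothetical $CMAKE_PREFIX_PATH
    print(cmake_prefix_path_env.replace(os.pathsep, ";"))
    # /opt/deps/zlib;/opt/deps/hdf5  (on a POSIX host, where os.pathsep is ":")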
|
||||
|
||||
def initconfig_package_entries(self):
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import collections.abc
|
||||
import inspect
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
@@ -15,7 +16,6 @@
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.util.path
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
|
||||
@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
|
||||
args = [
|
||||
"-G",
|
||||
generator,
|
||||
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
|
||||
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
|
||||
define("CMAKE_BUILD_TYPE", build_type),
|
||||
define("BUILD_TESTING", pkg.run_tests),
|
||||
]
|
||||
|
||||
@@ -102,11 +102,10 @@ def cuda_flags(arch_list):
|
||||
|
||||
depends_on("cuda@11.0:", when="cuda_arch=80")
|
||||
depends_on("cuda@11.1:", when="cuda_arch=86")
|
||||
|
||||
depends_on("cuda@11.4:", when="cuda_arch=87")
|
||||
|
||||
depends_on("cuda@11.8:", when="cuda_arch=89")
|
||||
depends_on("cuda@11.8:", when="cuda_arch=90")
|
||||
|
||||
depends_on("cuda@12.0:", when="cuda_arch=90")
|
||||
|
||||
# From the NVIDIA install guide we know of conflicts for particular
|
||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||
|
||||
@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
|
||||
$ source {prefix}/{component}/{version}/env/vars.sh
|
||||
"""
|
||||
# Only if environment modifications are desired (default is +envmods)
|
||||
if "+envmods" in self.spec:
|
||||
if "~envmods" not in self.spec:
|
||||
env.extend(
|
||||
EnvironmentModifications.from_sourcing_file(
|
||||
join_path(self.component_prefix, "env", "vars.sh")
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
import spack.store
|
||||
from spack.directives import build_system, depends_on, extends, maintainers
|
||||
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
||||
from spack.install_test import test_part
|
||||
from spack.version import Version
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
@@ -167,18 +168,65 @@ def remove_files_from_view(self, view, merge_map):
|
||||
|
||||
view.remove_files(to_remove)
|
||||
|
||||
def test(self):
|
||||
def test_imports(self):
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python.path
|
||||
for module in self.import_modules:
|
||||
self.run_test(
|
||||
inspect.getmodule(self).python.path,
|
||||
["-c", "import {0}".format(module)],
|
||||
purpose="checking import of {0}".format(module),
|
||||
with test_part(
|
||||
self,
|
||||
f"test_imports_{module}",
|
||||
purpose=f"checking import of {module}",
|
||||
work_dir="spack-test",
|
||||
)
|
||||
):
|
||||
python("-c", f"import {module}")
|
||||
|
||||
def update_external_dependencies(self, extendee_spec=None):
|
||||
"""
|
||||
Ensure all external python packages have a python dependency
|
||||
|
||||
If another package in the DAG depends on python, we use that
|
||||
python for the dependency of the external. If not, we assume
|
||||
that the external PythonPackage is installed into the same
|
||||
directory as the python it depends on.
|
||||
"""
|
||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
||||
if "python" not in self.spec:
|
||||
if extendee_spec:
|
||||
python = extendee_spec
|
||||
elif "python" in self.spec.root:
|
||||
python = self.spec.root["python"]
|
||||
else:
|
||||
python = self.get_external_python_for_prefix()
|
||||
if not python.concrete:
|
||||
repo = spack.repo.path.repo_for_pkg(python)
|
||||
python.namespace = repo.namespace
|
||||
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
host_target = host_platform.target("default_target")
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = "default_os"
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
# Ensure compiler information is present
|
||||
if not python.compiler:
|
||||
python.compiler = self.spec.compiler
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
|
||||
|
||||
|
||||
class PythonPackage(PythonExtension):
|
||||
@@ -225,51 +273,6 @@ def list_url(cls):
|
||||
name = cls.pypi.split("/")[0]
|
||||
return "https://pypi.org/simple/" + name + "/"
|
||||
|
||||
def update_external_dependencies(self, extendee_spec=None):
|
||||
"""
|
||||
Ensure all external python packages have a python dependency
|
||||
|
||||
If another package in the DAG depends on python, we use that
|
||||
python for the dependency of the external. If not, we assume
|
||||
that the external PythonPackage is installed into the same
|
||||
directory as the python it depends on.
|
||||
"""
|
||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
||||
if "python" not in self.spec:
|
||||
if extendee_spec:
|
||||
python = extendee_spec
|
||||
elif "python" in self.spec.root:
|
||||
python = self.spec.root["python"]
|
||||
else:
|
||||
python = self.get_external_python_for_prefix()
|
||||
if not python.concrete:
|
||||
repo = spack.repo.path.repo_for_pkg(python)
|
||||
python.namespace = repo.namespace
|
||||
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
host_target = host_platform.target("default_target")
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = "default_os"
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
# Ensure compiler information is present
|
||||
if not python.compiler:
|
||||
python.compiler = self.spec.compiler
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
|
||||
|
||||
def get_external_python_for_prefix(self):
|
||||
"""
|
||||
For an external package that extends python, find the most likely spec for the python
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import llnl.util.lang as lang
|
||||
|
||||
from spack.directives import extends, maintainers
|
||||
from spack.directives import extends
|
||||
|
||||
from .generic import GenericBuilder, Package
|
||||
|
||||
@@ -71,8 +71,6 @@ class RPackage(Package):
|
||||
|
||||
GenericBuilder = RBuilder
|
||||
|
||||
maintainers("glennpj")
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "RPackage"
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
from llnl.util.filesystem import find, join_path, working_dir
|
||||
|
||||
import spack.builder
|
||||
import spack.install_test
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
@@ -30,8 +31,8 @@ class SIPPackage(spack.package_base.PackageBase):
|
||||
#: Name of private sip module to install alongside package
|
||||
sip_module = "sip"
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test"]
|
||||
#: Callback names for install-time testing
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "sip"
|
||||
|
||||
@@ -87,18 +88,20 @@ def python(self, *args, **kwargs):
|
||||
"""The python ``Executable``."""
|
||||
inspect.getmodule(self).python(*args, **kwargs)
|
||||
|
||||
def test(self):
|
||||
def test_imports(self):
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python
|
||||
for module in self.import_modules:
|
||||
self.run_test(
|
||||
inspect.getmodule(self).python.path,
|
||||
["-c", "import {0}".format(module)],
|
||||
with spack.install_test.test_part(
|
||||
self,
|
||||
"test_imports_{0}".format(module),
|
||||
purpose="checking import of {0}".format(module),
|
||||
work_dir="spack-test",
|
||||
)
|
||||
):
|
||||
python("-c", "import {0}".format(module))
|
||||
|
||||
|
||||
@spack.builder.builder("sip")
|
||||
|
||||
@@ -28,7 +28,6 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.compilers as compilers
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
@@ -70,17 +69,10 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
|
||||
return False
|
||||
|
||||
|
||||
def _is_main_phase(phase_name):
|
||||
return True if phase_name == "specs" else False
|
||||
|
||||
|
||||
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
def get_job_name(spec, osarch, build_group):
|
||||
"""Given the necessary parts, format the gitlab job name
|
||||
|
||||
Arguments:
|
||||
phase (str): Either 'specs' for the main phase, or the name of a
|
||||
bootstrapping phase
|
||||
strip_compiler (bool): Should compiler be stripped from job name
|
||||
spec (spack.spec.Spec): Spec job will build
|
||||
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
|
||||
but sphinx doesn't recognize the type and fails).
|
||||
@@ -93,12 +85,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_str = ""
|
||||
format_args = []
|
||||
|
||||
if phase:
|
||||
format_str += "({{{0}}})".format(item_idx)
|
||||
format_args.append(phase)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_str += "{{{0}}}".format(item_idx)
|
||||
format_args.append(spec.name)
|
||||
item_idx += 1
|
||||
|
||||
@@ -110,10 +97,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_args.append(spec.version)
|
||||
item_idx += 1
|
||||
|
||||
if _is_main_phase(phase) is True or strip_compiler is False:
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
|
||||
format_str += " {{{0}}}".format(item_idx)
|
||||
format_args.append(osarch)
|
||||
@@ -153,49 +139,33 @@ def _add_dependency(spec_label, dep_label, deps):
|
||||
deps[spec_label].add(dep_label)
|
||||
|
||||
|
||||
def _get_spec_dependencies(
|
||||
specs, deps, spec_labels, check_index_only=False, mirrors_to_check=None
|
||||
):
|
||||
spec_deps_obj = _compute_spec_deps(
|
||||
specs, check_index_only=check_index_only, mirrors_to_check=mirrors_to_check
|
||||
)
|
||||
def _get_spec_dependencies(specs, deps, spec_labels):
|
||||
spec_deps_obj = _compute_spec_deps(specs)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj["dependencies"]
|
||||
specs = spec_deps_obj["specs"]
|
||||
|
||||
for entry in specs:
|
||||
spec_labels[entry["label"]] = {
|
||||
"spec": entry["spec"],
|
||||
"needs_rebuild": entry["needs_rebuild"],
|
||||
}
|
||||
spec_labels[entry["label"]] = entry["spec"]
|
||||
|
||||
for entry in dependencies:
|
||||
_add_dependency(entry["spec"], entry["depends"], deps)
|
||||
|
||||
|
||||
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
||||
def stage_spec_jobs(specs):
|
||||
"""Take a set of release specs and generate a list of "stages", where the
|
||||
jobs in any stage are dependent only on jobs in previous stages. This
|
||||
allows us to maximize build parallelism within the gitlab-ci framework.
|
||||
|
||||
Arguments:
|
||||
specs (Iterable): Specs to build
|
||||
check_index_only (bool): Regardless of whether DAG pruning is enabled,
|
||||
all configured mirrors are searched to see if binaries for specs
|
||||
are up to date on those mirrors. This flag limits that search to
|
||||
the binary cache indices on those mirrors to speed the process up,
|
||||
even though there is no guarantee the index is up to date.
|
||||
mirrors_to_check: Optional mapping giving mirrors to check instead of
|
||||
any configured mirrors.
|
||||
|
||||
Returns: A tuple of information objects describing the specs, dependencies
|
||||
and stages:
|
||||
|
||||
spec_labels: A dictionary mapping the spec labels which are made of
|
||||
(pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
|
||||
keys. The root spec is the spec of which this spec is a dependency
|
||||
and the spec is the formatted spec string for this spec.
|
||||
spec_labels: A dictionary mapping the spec labels (which are formatted
|
||||
as pkg-name/hash-prefix) to concrete specs.
|
||||
|
||||
deps: A dictionary where the keys should also have appeared as keys in
|
||||
the spec_labels dictionary, and the values are the set of
|
||||
@@ -224,13 +194,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
deps = {}
|
||||
spec_labels = {}
|
||||
|
||||
_get_spec_dependencies(
|
||||
specs,
|
||||
deps,
|
||||
spec_labels,
|
||||
check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check,
|
||||
)
|
||||
_get_spec_dependencies(specs, deps, spec_labels)
|
||||
|
||||
# Save the original deps, as we need to return them at the end of the
|
||||
# function. In the while loop below, the "dependencies" variable is
|
||||
@@ -256,24 +220,36 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
return spec_labels, deps, stages
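To make the simplified return values concrete, the three structures now have roughly this shape (labels, hashes and package names are made up):

    # Hypothetical shapes of the stage_spec_jobs() return values:
    spec_labels = {
        "zlib/abc1234": "<concrete Spec for zlib>",
        "cmake/def5678": "<concrete Spec for cmake>",
    }
    deps = {"cmake/def5678": {"zlib/abc1234"}}      # cmake's job needs zlib's job
    stages = [{"zlib/abc1234"}, {"cmake/def5678"}]  # jobs grouped into build stages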
|
||||
|
||||
|
||||
def _print_staging_summary(spec_labels, dependencies, stages):
|
||||
def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
|
||||
if not stages:
|
||||
return
|
||||
|
||||
tty.msg(" Staging summary ([x] means a job needs rebuilding):")
|
||||
mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
|
||||
tty.msg("Checked the following mirrors for binaries:")
|
||||
for m in mirrors.values():
|
||||
tty.msg(" {0}".format(m.fetch_url))
|
||||
|
||||
tty.msg("Staging summary ([x] means a job needs rebuilding):")
|
||||
for stage_index, stage in enumerate(stages):
|
||||
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
|
||||
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
|
||||
|
||||
for job in sorted(stage):
|
||||
s = spec_labels[job]["spec"]
|
||||
s = spec_labels[job]
|
||||
rebuild = rebuild_decisions[job].rebuild
|
||||
reason = rebuild_decisions[job].reason
|
||||
reason_msg = " ({0})".format(reason) if reason else ""
|
||||
tty.msg(
|
||||
" [{1}] {0} -> {2}".format(
|
||||
job, "x" if spec_labels[job]["needs_rebuild"] else " ", _get_spec_string(s)
|
||||
" [{1}] {0} -> {2}{3}".format(
|
||||
job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
|
||||
)
|
||||
)
|
||||
if rebuild_decisions[job].mirrors:
|
||||
tty.msg(" found on the following mirrors:")
|
||||
for murl in rebuild_decisions[job].mirrors:
|
||||
tty.msg(" {0}".format(murl))
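With the rebuild decisions threaded through, the staging summary printed here reads roughly as follows (specs, hashes and mirror URLs are hypothetical, and tty message prefixes are omitted):

    Checked the following mirrors for binaries:
      https://mirror.example.com/buildcache
    Staging summary ([x] means a job needs rebuilding):
      stage 0 (1 jobs):
        [ ] zlib/abc1234 -> zlib@1.2.13 (Pruned, found in mirrors)
          found on the following mirrors:
            https://mirror.example.com/buildcache
      stage 1 (1 jobs):
        [x] cmake/def5678 -> cmake@3.26.3 (Scheduled, not found anywhere)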
|
||||
|
||||
|
||||
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
|
||||
def _compute_spec_deps(spec_list):
|
||||
"""
|
||||
Computes all the dependencies for the spec(s) and generates a JSON
|
||||
object which provides both a list of unique spec names as well as a
|
||||
@@ -337,12 +313,8 @@ def append_dep(s, d):
|
||||
tty.msg("Will not stage external pkg: {0}".format(s))
|
||||
continue
|
||||
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only
|
||||
)
|
||||
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
|
||||
spec_labels[skey] = s
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = _spec_deps_key(d)
|
||||
@@ -352,14 +324,8 @@ def append_dep(s, d):
|
||||
|
||||
append_dep(skey, dkey)
|
||||
|
||||
for spec_label, spec_holder in spec_labels.items():
|
||||
specs.append(
|
||||
{
|
||||
"label": spec_label,
|
||||
"spec": spec_holder["spec"],
|
||||
"needs_rebuild": spec_holder["needs_rebuild"],
|
||||
}
|
||||
)
|
||||
for spec_label, concrete_spec in spec_labels.items():
|
||||
specs.append({"label": spec_label, "spec": concrete_spec})
|
||||
|
||||
deps_json_obj = {"specs": specs, "dependencies": dependencies}
|
||||
|
||||
@@ -371,26 +337,17 @@ def _spec_matches(spec, match_string):
|
||||
|
||||
|
||||
def _format_job_needs(
|
||||
phase_name,
|
||||
strip_compilers,
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
stage_spec_dict,
|
||||
enable_artifacts_buildcache,
|
||||
dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
|
||||
):
|
||||
needs_list = []
|
||||
for dep_job in dep_jobs:
|
||||
dep_spec_key = _spec_deps_key(dep_job)
|
||||
dep_spec_info = stage_spec_dict[dep_spec_key]
|
||||
rebuild = rebuild_decisions[dep_spec_key].rebuild
|
||||
|
||||
if not prune_dag or dep_spec_info["needs_rebuild"]:
|
||||
if not prune_dag or rebuild:
|
||||
needs_list.append(
|
||||
{
|
||||
"job": get_job_name(
|
||||
phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
|
||||
),
|
||||
"job": get_job_name(dep_job, dep_job.architecture, build_group),
|
||||
"artifacts": enable_artifacts_buildcache,
|
||||
}
|
||||
)
|
||||
@@ -490,17 +447,12 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
||||
return affected_specs
|
||||
|
||||
|
||||
def _build_jobs(phases, staged_phases):
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
|
||||
for stage_jobs in stages:
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
release_spec = spec_record["spec"]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
yield release_spec, release_spec_dag_hash
|
||||
def _build_jobs(spec_labels, stages):
|
||||
for stage_jobs in stages:
|
||||
for spec_label in stage_jobs:
|
||||
release_spec = spec_labels[spec_label]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
yield release_spec, release_spec_dag_hash
|
||||
|
||||
|
||||
def _noop(x):
|
||||
@@ -519,14 +471,21 @@ def _unpack_script(script_section, op=_noop):
|
||||
return script
|
||||
|
||||
|
||||
class RebuildDecision(object):
|
||||
def __init__(self):
|
||||
self.rebuild = True
|
||||
self.mirrors = []
|
||||
self.reason = ""
|
||||
|
||||
|
||||
class SpackCI:
|
||||
"""Spack CI object used to generate intermediate representation
|
||||
used by the CI generator(s).
|
||||
"""
|
||||
|
||||
def __init__(self, ci_config, phases, staged_phases):
|
||||
def __init__(self, ci_config, spec_labels, stages):
|
||||
"""Given the information from the ci section of the config
|
||||
and the job phases setup meta data needed for generating Spack
|
||||
and the staged jobs, set up meta data needed for generating Spack
|
||||
CI IR.
|
||||
"""
|
||||
|
||||
@@ -541,9 +500,6 @@ def __init__(self, ci_config, phases, staged_phases):
|
||||
"enable-artifacts-buildcache": self.ci_config.get(
|
||||
"enable-artifacts-buildcache", False
|
||||
),
|
||||
"bootstrap": self.ci_config.get(
|
||||
"bootstrap", []
|
||||
), # This is deprecated and should be removed
|
||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
||||
@@ -551,7 +507,7 @@ def __init__(self, ci_config, phases, staged_phases):
|
||||
}
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
for spec, dag_hash in _build_jobs(phases, staged_phases):
|
||||
for spec, dag_hash in _build_jobs(spec_labels, stages):
|
||||
jobs[dag_hash] = self.__init_job(spec)
|
||||
|
||||
for name in self.named_jobs:
|
||||
@@ -751,11 +707,12 @@ def generate_gitlab_ci_yaml(
|
||||
env.concretize()
|
||||
env.write()
|
||||
|
||||
yaml_root = ev.config_dict(env.manifest)
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
# Get the joined "ci" config with all of the current scopes resolved
|
||||
ci_config = cfg.get("ci")
|
||||
|
||||
config_deprecated = False
|
||||
if not ci_config:
|
||||
tty.warn("Environment does not have `ci` a configuration")
|
||||
gitlabci_config = yaml_root.get("gitlab-ci")
|
||||
@@ -768,6 +725,7 @@ def generate_gitlab_ci_yaml(
|
||||
)
|
||||
translate_deprecated_config(gitlabci_config)
|
||||
ci_config = gitlabci_config
|
||||
config_deprecated = True
|
||||
|
||||
# Default target is gitlab...and only target is gitlab
|
||||
if not ci_config.get("target", "gitlab") == "gitlab":
|
||||
@@ -831,6 +789,14 @@ def generate_gitlab_ci_yaml(
|
||||
# Values: "spack_pull_request", "spack_protected_branch", or not set
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
|
||||
|
||||
copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.warn(
|
||||
"SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
|
||||
"deprecated ci configuration, a no-op pipeline will be generated\n",
|
||||
"instead.",
|
||||
)
|
||||
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci generate requires an env containing a mirror")
|
||||
|
||||
@@ -863,25 +829,6 @@ def generate_gitlab_ci_yaml(
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
|
||||
bootstrap_specs = []
|
||||
phases = []
|
||||
if "bootstrap" in ci_config:
|
||||
for phase in ci_config["bootstrap"]:
|
||||
try:
|
||||
phase_name = phase.get("name")
|
||||
strip_compilers = phase.get("compiler-agnostic")
|
||||
except AttributeError:
|
||||
phase_name = phase
|
||||
strip_compilers = False
|
||||
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
|
||||
|
||||
for bs in env.spec_lists[phase_name]:
|
||||
bootstrap_specs.append(
|
||||
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
|
||||
)
|
||||
|
||||
phases.append({"name": "specs", "strip-compilers": False})
|
||||
|
||||
# If a remote mirror override (alternate buildcache destination) was
|
||||
# specified, add it here in case it has already built hashes we might
|
||||
# generate.
|
||||
@@ -936,7 +883,7 @@ def generate_gitlab_ci_yaml(
|
||||
# Add config scopes to environment
|
||||
env_includes = env_yaml_root["spack"].get("include", [])
|
||||
cli_scopes = [
|
||||
os.path.abspath(s.path)
|
||||
os.path.relpath(s.path, concrete_env_dir)
|
||||
for s in cfg.scopes().values()
|
||||
if type(s) == cfg.ImmutableConfigScope
|
||||
and s.path not in env_includes
|
||||
@@ -983,39 +930,13 @@ def generate_gitlab_ci_yaml(
|
||||
except bindist.FetchCacheError as e:
|
||||
tty.warn(e)
|
||||
|
||||
staged_phases = {}
|
||||
try:
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
if phase_name == "specs":
|
||||
# Anything in the "specs" of the environment are already
|
||||
# concretized by the block at the top of this method, so we
|
||||
# only need to find the concrete versions, and then avoid
|
||||
# re-concretizing them needlessly later on.
|
||||
concrete_phase_specs = [
|
||||
concrete
|
||||
for abstract, concrete in env.concretized_specs()
|
||||
if abstract in env.spec_lists[phase_name]
|
||||
]
|
||||
else:
|
||||
# Any specs lists in other definitions (but not in the
|
||||
# "specs") of the environment are not yet concretized so we
|
||||
# have to concretize them explicitly here.
|
||||
concrete_phase_specs = env.spec_lists[phase_name]
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
for phase_spec in concrete_phase_specs:
|
||||
phase_spec.concretize()
|
||||
staged_phases[phase_name] = stage_spec_jobs(
|
||||
concrete_phase_specs,
|
||||
check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check,
|
||||
)
|
||||
finally:
|
||||
# Clean up remote mirror override if enabled
|
||||
if remote_mirror_override:
|
||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||
spec_labels, dependencies, stages = stage_spec_jobs(
|
||||
[
|
||||
concrete
|
||||
for abstract, concrete in env.concretized_specs()
|
||||
if abstract in env.spec_lists["specs"]
|
||||
]
|
||||
)
|
||||
|
||||
all_job_names = []
|
||||
output_object = {}
|
||||
@@ -1038,276 +959,212 @@ def generate_gitlab_ci_yaml(
|
||||
else:
|
||||
broken_spec_urls = web_util.list_url(broken_specs_url)
|
||||
|
||||
spack_ci = SpackCI(ci_config, phases, staged_phases)
|
||||
spack_ci = SpackCI(ci_config, spec_labels, stages)
|
||||
spack_ci_ir = spack_ci.generate_ir()
|
||||
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
strip_compilers = phase["strip-compilers"]
|
||||
rebuild_decisions = {}
|
||||
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
for stage_jobs in stages:
|
||||
stage_name = "stage-{0}".format(stage_id)
|
||||
stage_names.append(stage_name)
|
||||
stage_id += 1
|
||||
|
||||
for stage_jobs in stages:
|
||||
stage_name = "stage-{0}".format(stage_id)
|
||||
stage_names.append(stage_name)
|
||||
stage_id += 1
|
||||
for spec_label in stage_jobs:
|
||||
release_spec = spec_labels[spec_label]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
release_spec = spec_record["spec"]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
spec_record = RebuildDecision()
|
||||
rebuild_decisions[spec_label] = spec_record
|
||||
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
tty.debug(
|
||||
"Pruning {0}/{1}, untouched by change.".format(
|
||||
release_spec.name, release_spec.dag_hash()[:7]
|
||||
)
|
||||
)
|
||||
spec_record["needs_rebuild"] = False
|
||||
continue
|
||||
|
||||
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
|
||||
|
||||
if not job_object:
|
||||
tty.warn("No match found for {0}, skipping it".format(release_spec))
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
spec_record.rebuild = False
|
||||
spec_record.reason = "Pruned, untouched by change."
|
||||
continue
|
||||
|
||||
if spack_pipeline_type is not None:
|
||||
# For spack pipelines "public" and "protected" are reserved tags
|
||||
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
job_object["tags"].extend(["protected"])
|
||||
elif spack_pipeline_type == "spack_pull_request":
|
||||
job_object["tags"].extend(["public"])
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
|
||||
)
|
||||
|
||||
if "script" not in job_object:
|
||||
raise AttributeError
|
||||
spec_record.rebuild = not up_to_date_mirrors
|
||||
if up_to_date_mirrors:
|
||||
spec_record.reason = "Pruned, found in mirrors"
|
||||
spec_record.mirrors = [m["mirror_url"] for m in up_to_date_mirrors]
|
||||
else:
|
||||
spec_record.reason = "Scheduled, not found anywhere"
|
||||
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", concrete_env_dir)
|
||||
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
|
||||
|
||||
job_object["script"] = _unpack_script(
|
||||
job_object["script"], op=main_script_replacements
|
||||
)
|
||||
if not job_object:
|
||||
tty.warn("No match found for {0}, skipping it".format(release_spec))
|
||||
continue
|
||||
|
||||
if "before_script" in job_object:
|
||||
job_object["before_script"] = _unpack_script(job_object["before_script"])
|
||||
if spack_pipeline_type is not None:
|
||||
# For spack pipelines "public" and "protected" are reserved tags
|
||||
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
job_object["tags"].extend(["protected"])
|
||||
elif spack_pipeline_type == "spack_pull_request":
|
||||
job_object["tags"].extend(["public"])
|
||||
|
||||
if "after_script" in job_object:
|
||||
job_object["after_script"] = _unpack_script(job_object["after_script"])
|
||||
if "script" not in job_object:
|
||||
raise AttributeError
|
||||
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(
|
||||
phase_name, strip_compilers, release_spec, osname, build_group
|
||||
)
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", rel_concrete_env_dir)
|
||||
|
||||
compiler_action = "NONE"
|
||||
if len(phases) > 1:
|
||||
compiler_action = "FIND_ANY"
|
||||
if _is_main_phase(phase_name):
|
||||
compiler_action = "INSTALL_MISSING"
|
||||
job_object["script"] = _unpack_script(
|
||||
job_object["script"], op=main_script_replacements
|
||||
)
|
||||
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_COMPILER_ACTION"] = compiler_action
|
||||
if "before_script" in job_object:
|
||||
job_object["before_script"] = _unpack_script(job_object["before_script"])
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label]["spec"])
|
||||
if "after_script" in job_object:
|
||||
job_object["after_script"] = _unpack_script(job_object["after_script"])
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(
|
||||
phase_name,
|
||||
strip_compilers,
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
spec_labels,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(release_spec, osname, build_group)
|
||||
|
||||
rebuild_spec = spec_record["needs_rebuild"]
|
||||
|
||||
# This next section helps gitlab make sure the right
|
||||
# bootstrapped compiler exists in the artifacts buildcache by
|
||||
# creating an artificial dependency between this spec and its
|
||||
# compiler. So, if we are in the main phase, and if the
|
||||
# compiler we are supposed to use is listed in any of the
|
||||
# bootstrap spec lists, then we will add more dependencies to
|
||||
# the job (that compiler and maybe it's dependencies as well).
|
||||
if _is_main_phase(phase_name):
|
||||
spec_arch_family = release_spec.architecture.target.microarchitecture.family
|
||||
compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
|
||||
for bs in bootstrap_specs:
|
||||
c_spec = bs["spec"]
|
||||
bs_arch = c_spec.architecture
|
||||
bs_arch_family = bs_arch.target.microarchitecture.family
|
||||
if (
|
||||
c_spec.intersects(compiler_pkg_spec)
|
||||
and bs_arch_family == spec_arch_family
|
||||
):
|
||||
# We found the bootstrap compiler this release spec
|
||||
# should be built with, so for DAG scheduling
|
||||
# purposes, we will at least add the compiler spec
|
||||
# to the jobs "needs". But if artifact buildcache
|
||||
# is enabled, we'll have to add all transitive deps
|
||||
# of the compiler as well.
|
||||
|
||||
# Here we check whether the bootstrapped compiler
|
||||
# needs to be rebuilt. Until compilers are proper
|
||||
# dependencies, we artificially force the spec to
|
||||
# be rebuilt if the compiler targeted to build it
|
||||
# needs to be rebuilt.
|
||||
bs_specs, _, _ = staged_phases[bs["phase-name"]]
|
||||
c_spec_key = _spec_deps_key(c_spec)
|
||||
rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
|
||||
rebuild_spec = rebuild_spec or rbld_comp
|
||||
# Also update record so dependents do not fail to
|
||||
# add this spec to their "needs"
|
||||
spec_record["needs_rebuild"] = rebuild_spec
|
||||
|
||||
dep_jobs = [c_spec]
|
||||
if enable_artifacts_buildcache:
|
||||
dep_jobs = [d for d in c_spec.traverse(deptype=all)]
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(
|
||||
bs["phase-name"],
|
||||
bs["strip-compilers"],
|
||||
dep_jobs,
|
||||
str(bs_arch),
|
||||
build_group,
|
||||
prune_dag,
|
||||
bs_specs,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
else:
|
||||
debug_msg = "".join(
|
||||
[
|
||||
"Considered compiler {0} for spec ",
|
||||
"{1}, but rejected it either because it was ",
|
||||
"not the compiler required by the spec, or ",
|
||||
"because the target arch families of the ",
|
||||
"spec and the compiler did not match",
|
||||
]
|
||||
).format(c_spec, release_spec)
|
||||
tty.debug(debug_msg)
|
||||
|
||||
if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
|
||||
tty.debug(
|
||||
"Pruning {0}/{1}, does not need rebuild.".format(
|
||||
release_spec.name, release_spec.dag_hash()
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
||||
known_broken_specs_encountered.append(release_spec_dag_hash)
|
||||
|
||||
# Only keep track of these if we are copying rebuilt cache entries
|
||||
if spack_buildcache_copy:
|
||||
# TODO: This assumes signed version of the spec
|
||||
buildcache_copies[release_spec_dag_hash] = [
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
},
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
if artifacts_root:
|
||||
job_object["needs"].append(
|
||||
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
|
||||
)
|
||||
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if cdash_handler:
|
||||
cdash_handler.current_spec = release_spec
|
||||
build_name = cdash_handler.build_name
|
||||
all_job_names.append(build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
|
||||
build_stamp = cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
job_object["artifacts"] = spack.config.merge_yaml(
|
||||
job_object.get("artifacts", {}),
|
||||
{
|
||||
"when": "always",
|
||||
"paths": [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
],
|
||||
},
|
||||
)
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
job_object["artifacts"]["paths"].extend(
|
||||
[
|
||||
os.path.join(bc_root, p)
|
||||
for p in [
|
||||
bindist.tarball_name(release_spec, ".spec.json"),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]
|
||||
]
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label])
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(
|
||||
dep_jobs,
|
||||
osname,
|
||||
build_group,
|
||||
prune_dag,
|
||||
rebuild_decisions,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
rebuild_spec = spec_record.rebuild
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
if not rebuild_spec and not copy_only_pipeline:
|
||||
if prune_dag:
|
||||
spec_record.reason = "Pruned, up-to-date"
|
||||
continue
|
||||
else:
|
||||
# DAG pruning is disabled, force the spec to rebuild. The
|
||||
# record still contains any mirrors on which the spec
|
||||
# may have been found, so we can print them in the staging
|
||||
# summary.
|
||||
spec_record.rebuild = True
|
||||
spec_record.reason = "Scheduled, DAG pruning disabled"
|
||||
|
||||
if spack_pipeline_type != "spack_copy_only":
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
||||
known_broken_specs_encountered.append(release_spec_dag_hash)
|
||||
|
||||
# Only keep track of these if we are copying rebuilt cache entries
|
||||
if spack_buildcache_copy:
|
||||
# TODO: This assumes signed version of the spec
|
||||
buildcache_copies[release_spec_dag_hash] = [
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
},
|
||||
{
|
||||
"src": url_util.join(
|
||||
buildcache_copy_src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
buildcache_copy_dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
if artifacts_root:
|
||||
job_object["needs"].append(
|
||||
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
|
||||
)
|
||||
|
||||
# Let downstream jobs know whether the spec needed rebuilding, regardless
|
||||
# whether DAG pruning was enabled or not.
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
|
||||
|
||||
if cdash_handler:
|
||||
cdash_handler.current_spec = release_spec
|
||||
build_name = cdash_handler.build_name
|
||||
all_job_names.append(build_name)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
|
||||
build_stamp = cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
job_object["artifacts"] = spack.config.merge_yaml(
|
||||
job_object.get("artifacts", {}),
|
||||
{
|
||||
"when": "always",
|
||||
"paths": [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
job_object["artifacts"]["paths"].extend(
|
||||
[
|
||||
os.path.join(bc_root, p)
|
||||
for p in [
|
||||
bindist.tarball_name(release_spec, ".spec.json"),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]
|
||||
]
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
|
||||
if not copy_only_pipeline:
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
|
||||
if print_summary:
for phase in phases:
phase_name = phase["name"]
tty.msg('Stages for phase "{0}"'.format(phase_name))
phase_stages = staged_phases[phase_name]
_print_staging_summary(*phase_stages)
_print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

# Clean up remote mirror override if enabled
if remote_mirror_override:
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
if spack_pipeline_type == "spack_pull_request":
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))

@@ -1330,7 +1187,7 @@ def main_script_replacements(cmd):
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
if spack_pipeline_type == "spack_copy_only":
|
||||
if copy_only_pipeline and not config_deprecated:
|
||||
stage_names.append("copy")
|
||||
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
|
||||
sync_job["stage"] = "copy"
|
||||
@@ -1474,12 +1331,18 @@ def main_script_replacements(cmd):
|
||||
sorted_output = cinw.needs_to_dependencies(sorted_output)
|
||||
else:
|
||||
# No jobs were generated
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
|
||||
noop_job["retry"] = service_job_retries
|
||||
|
||||
sorted_output = {"no-specs-to-rebuild": noop_job}
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.debug("Generating no-op job as copy-only is unsupported here.")
|
||||
noop_job["script"] = [
|
||||
'echo "copy-only pipelines are not supported with deprecated ci configs"'
|
||||
]
|
||||
sorted_output = {"unsupported-copy": noop_job}
|
||||
else:
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
sorted_output = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
if known_broken_specs_encountered:
|
||||
tty.error("This pipeline generated hashes known to be broken on develop:")
|
||||
@@ -1560,44 +1423,6 @@ def can_verify_binaries():
|
||||
return len(gpg_util.public_keys()) >= 1
|
||||
|
||||
|
||||
def configure_compilers(compiler_action, scope=None):
"""Depending on the compiler_action parameter, either turn on the
install_missing_compilers config option, or find spack compilers,
or do nothing. This is used from rebuild jobs in bootstrapping
pipelines, where in the bootstrapping phase we would pass
FIND_ANY in case of compiler-agnostic bootstrapping, while in the
spec building phase we would pass INSTALL_MISSING in order to get
spack to use the compiler which was built in the previous phase and
is now sitting in the binary mirror.

Arguments:
compiler_action (str): 'FIND_ANY' and 'INSTALL_MISSING' have the meanings
described above. Any other value essentially results in a no-op.
scope (spack.config.ConfigScope): Optional. The scope in which to look for
compilers, in case 'FIND_ANY' was provided.
"""
if compiler_action == "INSTALL_MISSING":
|
||||
tty.debug("Make sure bootstrapped compiler will be installed")
|
||||
config = cfg.get("config")
|
||||
config["install_missing_compilers"] = True
|
||||
cfg.set("config", config)
|
||||
elif compiler_action == "FIND_ANY":
|
||||
tty.debug("Just find any available compiler")
|
||||
find_args = ["find"]
|
||||
if scope:
|
||||
find_args.extend(["--scope", scope])
|
||||
output = spack_compiler(*find_args)
|
||||
tty.debug("spack compiler find")
|
||||
tty.debug(output)
|
||||
output = spack_compiler("list")
|
||||
tty.debug("spack compiler list")
|
||||
tty.debug(output)
|
||||
else:
|
||||
tty.debug("No compiler action to be taken")
|
||||
|
||||
return None
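A minimal usage sketch, assuming the SPACK_COMPILER_ACTION variable exported for rebuild jobs (it is read the same way in ci_rebuild further down); any other value falls through to the no-op branch:

import os

compiler_action = os.environ.get("SPACK_COMPILER_ACTION", "NONE")
configure_compilers(compiler_action)  # INSTALL_MISSING, FIND_ANY, or a debug-logged no-op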
|
||||
|
||||
|
||||
def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
|
||||
"""Unchecked version of the public API, for easier mocking"""
|
||||
unsigned = not sign_binaries
|
||||
|
||||
@@ -347,7 +347,7 @@ def iter_groups(specs, indent, all_headers):
|
||||
spack.spec.architecture_color,
|
||||
architecture if architecture else "no arch",
|
||||
spack.spec.compiler_color,
|
||||
f"{compiler.name}@{compiler.version}" if compiler else "no compiler",
|
||||
f"{compiler.display_str}" if compiler else "no compiler",
|
||||
)
|
||||
|
||||
# Sometimes we want to display specs that are not yet concretized.
|
||||
|
||||
@@ -43,13 +43,6 @@ def setup_parser(subparser):
|
||||
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
|
||||
|
||||
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
||||
# TODO: remove from Spack 0.21
|
||||
push.add_argument(
|
||||
"-r",
|
||||
"--rel",
|
||||
action="store_true",
|
||||
help="make all rpaths relative before creating tarballs. (deprecated)",
|
||||
)
|
||||
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
|
||||
push.add_argument(
|
||||
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
|
||||
@@ -63,37 +56,7 @@ def setup_parser(subparser):
|
||||
push.add_argument(
|
||||
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
|
||||
)
|
||||
output = push.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"-d",
|
||||
"--directory",
|
||||
metavar="directory",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="local directory where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="name of the mirror where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
output.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the mirror where buildcaches will be written. (deprecated)",
|
||||
)
|
||||
# Unfortunately we cannot add this to the mutually exclusive group above,
|
||||
# because we have further positional arguments.
|
||||
# TODO: require from Spack 0.21
|
||||
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
|
||||
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
|
||||
push.add_argument(
|
||||
"--update-index",
|
||||
"--rebuild-index",
|
||||
@@ -127,13 +90,6 @@ def setup_parser(subparser):
|
||||
install.add_argument(
|
||||
"-m", "--multiple", action="store_true", help="allow all matching packages "
|
||||
)
|
||||
# TODO: remove from Spack 0.21
|
||||
install.add_argument(
|
||||
"-a",
|
||||
"--allow-root",
|
||||
action="store_true",
|
||||
help="allow install root string in binary files after RPATH substitution. (deprecated)",
|
||||
)
|
||||
install.add_argument(
|
||||
"-u",
|
||||
"--unsigned",
|
||||
@@ -268,75 +224,21 @@ def setup_parser(subparser):
|
||||
# Sync buildcache entries from one mirror to another
|
||||
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
|
||||
sync.add_argument(
|
||||
"--manifest-glob",
|
||||
default=None,
|
||||
help="A quoted glob pattern identifying copy manifest files",
|
||||
"--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
|
||||
)
|
||||
source = sync.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-directory",
|
||||
metavar="DIRECTORY",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="Source mirror as a local file path (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-mirror-name",
|
||||
metavar="MIRROR_NAME",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="Name of the source mirror (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
source.add_argument(
|
||||
"--src-mirror-url",
|
||||
metavar="MIRROR_URL",
|
||||
dest="src_mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the source mirror (deprecated)",
|
||||
)
|
||||
# TODO: only support this in 0.21
|
||||
source.add_argument(
|
||||
sync.add_argument(
|
||||
"src_mirror",
|
||||
metavar="source mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Source mirror name, path, or URL",
|
||||
nargs="?",
|
||||
help="Source mirror name, path, or URL",
|
||||
)
|
||||
dest = sync.add_mutually_exclusive_group(required=False)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-directory",
|
||||
metavar="DIRECTORY",
|
||||
dest="dest_mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="Destination mirror as a local file path (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-mirror-name",
|
||||
metavar="MIRROR_NAME",
|
||||
type=arguments.mirror_name,
|
||||
dest="dest_mirror_flag",
|
||||
help="Name of the destination mirror (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
dest.add_argument(
|
||||
"--dest-mirror-url",
|
||||
metavar="MIRROR_URL",
|
||||
dest="dest_mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the destination mirror (deprecated)",
|
||||
)
|
||||
# TODO: only support this in 0.21
|
||||
dest.add_argument(
|
||||
sync.add_argument(
|
||||
"dest_mirror",
|
||||
metavar="destination mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Destination mirror name, path, or URL",
|
||||
nargs="?",
|
||||
help="Destination mirror name, path, or URL",
|
||||
)
|
||||
sync.set_defaults(func=sync_fn)
|
||||
|
||||
@@ -344,39 +246,8 @@ def setup_parser(subparser):
|
||||
update_index = subparsers.add_parser(
|
||||
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
|
||||
)
|
||||
update_index_out = update_index.add_mutually_exclusive_group(required=True)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"-d",
|
||||
"--directory",
|
||||
metavar="directory",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_directory,
|
||||
help="local directory where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"-m",
|
||||
"--mirror-name",
|
||||
metavar="mirror-name",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_name,
|
||||
help="name of the mirror where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: remove in Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"--mirror-url",
|
||||
metavar="mirror-url",
|
||||
dest="mirror_flag",
|
||||
type=arguments.mirror_url,
|
||||
help="URL of the mirror where buildcaches will be written (deprecated)",
|
||||
)
|
||||
# TODO: require from Spack 0.21
|
||||
update_index_out.add_argument(
|
||||
"mirror",
|
||||
type=arguments.mirror_name_or_url,
|
||||
help="Destination mirror name, path, or URL",
|
||||
nargs="?",
|
||||
update_index.add_argument(
|
||||
"mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
|
||||
)
|
||||
update_index.add_argument(
|
||||
"-k",
|
||||
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.mirror_flag:
|
||||
mirror = args.mirror_flag
|
||||
elif not args.mirror:
|
||||
raise ValueError("No mirror provided")
|
||||
else:
|
||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||
|
||||
if args.mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
|
||||
if args.rel:
|
||||
tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
|
||||
|
||||
# TODO: remove this in 0.21. If we have mirror_flag, the first
|
||||
# spec is in the positional mirror arg due to argparse limitations.
|
||||
input_specs = args.specs
|
||||
if args.mirror_flag and args.mirror:
|
||||
input_specs.insert(0, args.mirror)
|
||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||
|
||||
url = mirror.push_url
|
||||
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
_matching_specs(input_specs, args.spec_file),
|
||||
_matching_specs(args.specs, args.spec_file),
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
)
|
||||
@@ -486,7 +337,6 @@ def push_fn(args):
|
||||
url,
|
||||
bindist.PushOptions(
|
||||
force=args.force,
|
||||
relative=args.rel,
|
||||
unsigned=args.unsigned,
|
||||
allow_root=args.allow_root,
|
||||
key=args.key,
|
||||
@@ -524,9 +374,6 @@ def install_fn(args):
|
||||
if not args.specs:
|
||||
tty.die("a spec argument is required to install from a buildcache")
|
||||
|
||||
if args.allow_root:
|
||||
tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")
|
||||
|
||||
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
|
||||
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
|
||||
for match in matches:
|
||||
@@ -710,21 +557,11 @@ def sync_fn(args):
|
||||
manifest_copy(glob.glob(args.manifest_glob))
|
||||
return 0
|
||||
|
||||
# If no manifest_glob, require a source and dest mirror.
|
||||
# TODO: Simplify in Spack 0.21
|
||||
if not (args.src_mirror_flag or args.src_mirror) or not (
|
||||
args.dest_mirror_flag or args.dest_mirror
|
||||
):
|
||||
raise ValueError("Source and destination mirror are required.")
|
||||
if args.src_mirror is None or args.dest_mirror is None:
|
||||
tty.die("Provide mirrors to sync from and to.")
|
||||
|
||||
if args.src_mirror_flag or args.dest_mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
|
||||
src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
|
||||
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
|
||||
src_mirror = args.src_mirror
|
||||
dest_mirror = args.dest_mirror
|
||||
|
||||
src_mirror_url = src_mirror.fetch_url
|
||||
dest_mirror_url = dest_mirror.push_url
|
||||
@@ -803,13 +640,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
||||
|
||||
def update_index_fn(args):
|
||||
"""Update a buildcache index."""
|
||||
if args.mirror_flag:
|
||||
tty.warn(
|
||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
||||
"Spack 0.21, use positional arguments instead."
|
||||
)
|
||||
mirror = args.mirror_flag if args.mirror_flag else args.mirror
|
||||
update_index(mirror, update_keys=args.keys)
|
||||
update_index(args.mirror, update_keys=args.keys)
|
||||
|
||||
|
||||
def buildcache(parser, args):
|
||||
|
||||
@@ -228,7 +228,7 @@ def ci_reindex(args):
|
||||
Use the active, gitlab-enabled environment to rebuild the buildcache
|
||||
index for the associated mirror."""
|
||||
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
|
||||
yaml_root = ev.config_dict(env.manifest)
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci rebuild-index requires an env containing a mirror")
|
||||
@@ -274,7 +274,6 @@ def ci_rebuild(args):
|
||||
signing_key = os.environ.get("SPACK_SIGNING_KEY")
|
||||
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
|
||||
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
|
||||
compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
|
||||
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
|
||||
@@ -295,7 +294,6 @@ def ci_rebuild(args):
|
||||
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
||||
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
||||
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
||||
tty.debug("compiler_action = {0}".format(compiler_action))
|
||||
|
||||
# Query the environment manifest to find out whether we're reporting to a
|
||||
# CDash instance, and if so, gather some information from the manifest to
|
||||
@@ -411,14 +409,6 @@ def ci_rebuild(args):
|
||||
if signing_key:
|
||||
spack_ci.import_signing_key(signing_key)
|
||||
|
||||
# Depending on the specifics of this job, we might need to turn on the
|
||||
# "config:install_missing compilers" option (to build this job spec
|
||||
# with a bootstrapped compiler), or possibly run "spack compiler find"
|
||||
# (to build a bootstrap compiler or one of its deps in a
|
||||
# compiler-agnostic way), or maybe do nothing at all (to build a spec
|
||||
# using a compiler already installed on the target system).
|
||||
spack_ci.configure_compilers(compiler_action)
|
||||
|
||||
# Write this job's spec json into the reproduction directory, and it will
|
||||
# also be used in the generated "spack install" command to install the spec
|
||||
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
||||
|
||||
@@ -36,7 +36,10 @@ def shell_init_instructions(cmd, equivalent):
|
||||
" source %s/setup-env.fish" % spack.paths.share_path,
|
||||
"",
|
||||
color.colorize("@*c{For Windows batch:}"),
|
||||
" source %s/spack_cmd.bat" % spack.paths.share_path,
|
||||
" %s\\spack_cmd.bat" % spack.paths.bin_path,
|
||||
"",
|
||||
color.colorize("@*c{For PowerShell:}"),
|
||||
" %s\\setup-env.ps1" % spack.paths.share_path,
|
||||
"",
|
||||
"Or, if you do not want to use shell support, run "
|
||||
+ ("one of these" if shell_specific else "this")
|
||||
@@ -50,6 +53,7 @@ def shell_init_instructions(cmd, equivalent):
|
||||
equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
|
||||
equivalent.format(sh_arg="--fish") + " # fish",
|
||||
equivalent.format(sh_arg="--bat ") + " # batch",
|
||||
equivalent.format(sh_arg="--pwsh") + " # powershell",
|
||||
]
|
||||
else:
|
||||
msg += [" " + equivalent]
|
||||
|
||||
@@ -349,7 +349,7 @@ def install_status():
|
||||
"-I",
|
||||
"--install-status",
|
||||
action="store_true",
|
||||
default=False,
|
||||
default=True,
|
||||
help="show install status of packages. packages can be: "
|
||||
"installed [+], missing and needed by an installed package [-], "
|
||||
"installed in and upstream instance [^], "
|
||||
@@ -357,6 +357,17 @@ def install_status():
|
||||
)
|
||||
|
||||
|
||||
@arg
|
||||
def no_install_status():
|
||||
return Args(
|
||||
"--no-install-status",
|
||||
dest="install_status",
|
||||
action="store_false",
|
||||
default=True,
|
||||
help="do not show install status annotations",
|
||||
)
|
||||
|
||||
|
||||
@arg
|
||||
def no_checksum():
|
||||
return Args(
|
||||
|
||||
@@ -53,7 +53,7 @@ def setup_parser(subparser):
|
||||
"--scope",
|
||||
choices=scopes,
|
||||
metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope("compilers"),
|
||||
default=None,
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
|
||||
@@ -98,7 +98,7 @@ def compiler_find(args):
|
||||
config = spack.config.config
|
||||
filename = config.get_config_filename(args.scope, "compilers")
|
||||
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
|
||||
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
|
||||
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
|
||||
else:
|
||||
tty.msg("Found no new compilers")
|
||||
tty.msg("Compilers are defined in the following files:")
|
||||
@@ -106,19 +106,21 @@ def compiler_find(args):
|
||||
|
||||
|
||||
def compiler_remove(args):
|
||||
cspec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
|
||||
if not compilers:
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
elif not args.all and len(compilers) > 1:
|
||||
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
|
||||
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
|
||||
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
|
||||
|
||||
if not candidate_compilers:
|
||||
tty.die("No compilers match spec %s" % compiler_spec)
|
||||
|
||||
if not args.all and len(candidate_compilers) > 1:
|
||||
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
|
||||
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
|
||||
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
|
||||
sys.exit(1)
|
||||
|
||||
for compiler in compilers:
|
||||
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
|
||||
tty.msg("Removed compiler %s" % compiler.spec)
|
||||
for current_compiler in candidate_compilers:
|
||||
spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
|
||||
tty.msg(f"{current_compiler.spec.display_str} has been removed")
|
||||
|
||||
|
||||
def compiler_info(args):
|
||||
@@ -130,7 +132,7 @@ def compiler_info(args):
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
else:
|
||||
for c in compilers:
|
||||
print(str(c.spec) + ":")
|
||||
print(c.spec.display_str + ":")
|
||||
print("\tpaths:")
|
||||
for cpath in ["cc", "cxx", "f77", "fc"]:
|
||||
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
||||
@@ -188,7 +190,7 @@ def compiler_list(args):
|
||||
os_str += "-%s" % target
|
||||
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
|
||||
tty.hline(colorize(cname), char="-")
|
||||
colify(reversed(sorted(c.spec for c in compilers)))
|
||||
colify(reversed(sorted(c.spec.display_str for c in compilers)))
|
||||
|
||||
|
||||
def compiler(parser, args):
|
||||
|
||||
@@ -715,7 +715,7 @@ def __call__(self, stage, url):
|
||||
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
|
||||
except ProcessError:
|
||||
output = ""
|
||||
lines = output.split("\n")
|
||||
lines = output.splitlines()
|
||||
|
||||
# Determine the build system based on the files contained
|
||||
# in the archive.
|
||||
|
||||
@@ -86,6 +86,13 @@ def env_activate_setup_parser(subparser):
|
||||
const="bat",
|
||||
help="print bat commands to activate the environment",
|
||||
)
|
||||
shells.add_argument(
|
||||
"--pwsh",
|
||||
action="store_const",
|
||||
dest="shell",
|
||||
const="pwsh",
|
||||
help="print powershell commands to activate environment",
|
||||
)
|
||||
|
||||
view_options = subparser.add_mutually_exclusive_group()
|
||||
view_options.add_argument(
|
||||
@@ -302,7 +309,7 @@ def env_create(args):
|
||||
# the environment should not include a view.
|
||||
with_view = None
|
||||
|
||||
_env_create(
|
||||
env = _env_create(
|
||||
args.create_env,
|
||||
init_file=args.envfile,
|
||||
dir=args.dir,
|
||||
@@ -310,6 +317,9 @@ def env_create(args):
|
||||
keep_relative=args.keep_relative,
|
||||
)
|
||||
|
||||
# Generate views, only really useful for environments created from spack.lock files.
|
||||
env.regenerate_views()
|
||||
|
||||
|
||||
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
|
||||
"""Create a new environment, with an optional yaml description.
|
||||
|
||||
@@ -79,6 +79,12 @@ def setup_parser(subparser):
|
||||
read_cray_manifest.add_argument(
|
||||
"--directory", default=None, help="specify a directory storing a group of manifest files"
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--ignore-default-dir",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="ignore the default directory of manifest files",
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
@@ -177,11 +183,16 @@ def external_read_cray_manifest(args):
|
||||
manifest_directory=args.directory,
|
||||
dry_run=args.dry_run,
|
||||
fail_on_error=args.fail_on_error,
|
||||
ignore_default_dir=args.ignore_default_dir,
|
||||
)
|
||||
|
||||
|
||||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
||||
manifest_file=None,
|
||||
manifest_directory=None,
|
||||
dry_run=False,
|
||||
fail_on_error=False,
|
||||
ignore_default_dir=False,
|
||||
):
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
@@ -191,7 +202,7 @@ def _collect_and_consume_cray_manifest_files(
|
||||
if manifest_directory:
|
||||
manifest_dirs.append(manifest_directory)
|
||||
|
||||
if os.path.isdir(cray_manifest.default_path):
|
||||
if not ignore_default_dir and os.path.isdir(cray_manifest.default_path):
|
||||
tty.debug(
|
||||
"Cray manifest path {0} exists: collecting all files to read.".format(
|
||||
cray_manifest.default_path
|
||||
|
||||
@@ -116,21 +116,23 @@ def one_spec_or_raise(specs):
|
||||
|
||||
|
||||
def check_module_set_name(name):
|
||||
modules_config = spack.config.get("modules")
|
||||
valid_names = set(
|
||||
[
|
||||
key
|
||||
for key, value in modules_config.items()
|
||||
if isinstance(value, dict) and value.get("enable", [])
|
||||
]
|
||||
)
|
||||
if "enable" in modules_config and modules_config["enable"]:
|
||||
valid_names.add("default")
|
||||
modules = spack.config.get("modules")
|
||||
if name != "prefix_inspections" and name in modules:
|
||||
return
|
||||
|
||||
if name not in valid_names:
|
||||
msg = "Cannot use invalid module set %s." % name
|
||||
msg += " Valid module set names are %s" % list(valid_names)
|
||||
raise spack.config.ConfigError(msg)
|
||||
names = [k for k in modules if k != "prefix_inspections"]
|
||||
|
||||
if not names:
|
||||
raise spack.config.ConfigError(
|
||||
f"Module set configuration is missing. Cannot use module set '{name}'"
|
||||
)
|
||||
|
||||
pretty_names = "', '".join(names)
|
||||
|
||||
raise spack.config.ConfigError(
|
||||
f"Cannot use invalid module set '{name}'.",
|
||||
f"Valid module set names are: '{pretty_names}'.",
|
||||
)
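A hedged example of the new behavior, with a made-up modules configuration; any top-level key other than "prefix_inspections" now counts as a module set name:

modules_yaml = {"default": {"enable": ["tcl"]}, "prefix_inspections": {"bin": ["PATH"]}}
# check_module_set_name("default")            -> returns silently
# check_module_set_name("prefix_inspections") -> ConfigError (reserved key, not a module set)
# check_module_set_name("lmod_sets")          -> ConfigError listing 'default' as the valid name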
|
||||
|
||||
|
||||
_missing_modules_warning = (
|
||||
|
||||
@@ -44,7 +44,11 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
subparser.add_argument(
|
||||
"-y",
|
||||
"--yaml",
|
||||
|
||||
@@ -31,7 +31,11 @@ def setup_parser(subparser):
|
||||
for further documentation regarding the spec syntax, see:
|
||||
spack help --spec
|
||||
"""
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
"-y",
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
|
||||
tutorial_branch = "releases/v0.19"
|
||||
tutorial_branch = "releases/v0.20"
|
||||
tutorial_mirror = "file:///mirror"
|
||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
||||
@@ -37,7 +37,6 @@
|
||||
"implicit_rpaths",
|
||||
"extra_rpaths",
|
||||
]
|
||||
_cache_config_file = []
|
||||
|
||||
# TODO: Caches at module level make it difficult to mock configurations in
|
||||
# TODO: unit tests. It might be worth reworking their implementation.
|
||||
@@ -112,36 +111,26 @@ def _to_dict(compiler):
|
||||
def get_compiler_config(scope=None, init_config=True):
|
||||
"""Return the compiler configuration for the specified architecture."""
|
||||
|
||||
def init_compiler_config():
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compilers_dict, scope=scope)
|
||||
config = spack.config.get("compilers", scope=scope) or []
|
||||
if config or not init_config:
|
||||
return config
|
||||
|
||||
merged_config = spack.config.get("compilers")
|
||||
if merged_config:
|
||||
return config
|
||||
|
||||
_init_compiler_config(scope=scope)
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
# Update the configuration if there are currently no compilers
|
||||
# configured. Avoid updating automatically if there ARE site
|
||||
# compilers configured but no user ones.
|
||||
if not config and init_config:
|
||||
if scope is None:
|
||||
# We know no compilers were configured in any scope.
|
||||
init_compiler_config()
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
elif scope == "user":
|
||||
# Check the site config and update the user config if
|
||||
# nothing is configured at the site level.
|
||||
site_config = spack.config.get("compilers", scope="site")
|
||||
sys_config = spack.config.get("compilers", scope="system")
|
||||
if not site_config and not sys_config:
|
||||
init_compiler_config()
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
return config
|
||||
elif config:
|
||||
return config
|
||||
else:
|
||||
return [] # Return empty list which we will later append to.
|
||||
return config
|
||||
|
||||
|
||||
def _init_compiler_config(*, scope):
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compilers_dict, scope=scope)
|
||||
|
||||
|
||||
def compiler_config_files():
|
||||
@@ -165,52 +154,65 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
|
||||
compiler_config = get_compiler_config(scope, init_config)
|
||||
for compiler in compilers:
|
||||
compiler_config.append(_to_dict(compiler))
|
||||
global _cache_config_file
|
||||
_cache_config_file = compiler_config
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def remove_compiler_from_config(compiler_spec, scope=None):
|
||||
"""Remove compilers from the config, by spec.
|
||||
"""Remove compilers from configuration by spec.
|
||||
|
||||
If scope is None, all the scopes are searched for removal.
|
||||
|
||||
Arguments:
|
||||
compiler_specs: a list of CompilerSpec objects.
|
||||
scope: configuration scope to modify.
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope to modify
|
||||
"""
|
||||
# Need a better way for this
|
||||
global _cache_config_file
|
||||
candidate_scopes = [scope]
|
||||
if scope is None:
|
||||
candidate_scopes = spack.config.config.scopes.keys()
|
||||
|
||||
removal_happened = False
|
||||
for current_scope in candidate_scopes:
|
||||
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
|
||||
|
||||
return removal_happened
|
||||
|
||||
|
||||
def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
"""Removes a compiler from a specific configuration scope.
|
||||
|
||||
Args:
|
||||
compiler_spec: compiler to be removed
|
||||
scope: configuration scope under consideration
|
||||
|
||||
Returns:
|
||||
True if one or more compiler entries were actually removed, False otherwise
|
||||
"""
|
||||
assert scope is not None, "a specific scope is needed when calling this function"
|
||||
compiler_config = get_compiler_config(scope)
|
||||
config_length = len(compiler_config)
|
||||
|
||||
filtered_compiler_config = [
|
||||
comp
|
||||
for comp in compiler_config
|
||||
compiler_entry
|
||||
for compiler_entry in compiler_config
|
||||
if not spack.spec.parse_with_version_concrete(
|
||||
comp["compiler"]["spec"], compiler=True
|
||||
compiler_entry["compiler"]["spec"], compiler=True
|
||||
).satisfies(compiler_spec)
|
||||
]
|
||||
|
||||
# Update the cache for changes
|
||||
_cache_config_file = filtered_compiler_config
|
||||
if len(filtered_compiler_config) == config_length: # No items removed
|
||||
CompilerSpecInsufficientlySpecificError(compiler_spec)
|
||||
spack.config.set("compilers", filtered_compiler_config, scope=scope)
|
||||
if len(filtered_compiler_config) == len(compiler_config):
|
||||
return False
|
||||
|
||||
# We need to preserve the YAML type for comments, hence we are copying the
|
||||
# items in the list that has just been retrieved
|
||||
compiler_config[:] = filtered_compiler_config
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
return True
|
||||
|
||||
|
||||
def all_compilers_config(scope=None, init_config=True):
|
||||
"""Return a set of specs for all the compiler versions currently
|
||||
available to build with. These are instances of CompilerSpec.
|
||||
"""
|
||||
# Get compilers for this architecture.
|
||||
# Create a cache of the config file so we don't load all the time.
|
||||
global _cache_config_file
|
||||
if not _cache_config_file:
|
||||
_cache_config_file = get_compiler_config(scope, init_config)
|
||||
return _cache_config_file
|
||||
else:
|
||||
return _cache_config_file
|
||||
return get_compiler_config(scope, init_config)
|
||||
|
||||
|
||||
def all_compiler_specs(scope=None, init_config=True):
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
|
||||
|
||||
def get_valid_fortran_pth(comp_ver):
|
||||
cl_ver = str(comp_ver).split("@")[1]
|
||||
cl_ver = str(comp_ver)
|
||||
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
|
||||
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
|
||||
for ver in sort_fc_ver:
|
||||
@@ -75,7 +75,7 @@ class Msvc(Compiler):
|
||||
# file based on compiler executable path.
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
new_pth = [pth if pth else get_valid_fortran_pth(args[0]) for pth in args[3]]
|
||||
new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
|
||||
args[3][:] = new_pth
|
||||
super(Msvc, self).__init__(*args, **kwargs)
|
||||
if os.getenv("ONEAPI_ROOT"):
|
||||
@@ -151,7 +151,11 @@ def setup_custom_environment(self, pkg, env):
|
||||
arch = arch.replace("-", "_")
|
||||
# vcvars can target specific sdk versions, force it to pick up concretized sdk
|
||||
# version, if needed by spec
|
||||
sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
|
||||
sdk_ver = (
|
||||
""
|
||||
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
|
||||
else pkg.spec["win-sdk"].version.string + ".0"
|
||||
)
|
||||
# provide vcvars with msvc version selected by concretization,
|
||||
# not whatever it happens to pick up on the system (highest available version)
|
||||
out = subprocess.check_output( # novermin
|
||||
|
||||
@@ -81,7 +81,7 @@
|
||||
# Same as above, but including keys for environments
|
||||
# this allows us to unify config reading between configs and environments
|
||||
all_schemas = copy.deepcopy(section_schemas)
|
||||
all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
|
||||
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
|
||||
|
||||
#: Path to the default configuration
|
||||
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
|
||||
@@ -111,14 +111,6 @@
|
||||
overrides_base_name = "overrides-"
|
||||
|
||||
|
||||
def first_existing(dictionary, keys):
|
||||
"""Get the value of the first key in keys that is in the dictionary."""
|
||||
try:
|
||||
return next(k for k in keys if k in dictionary)
|
||||
except StopIteration:
|
||||
raise KeyError("None of %s is in dict!" % str(keys))
|
||||
|
||||
|
||||
class ConfigScope(object):
|
||||
"""This class represents a configuration scope.
|
||||
|
||||
@@ -838,12 +830,10 @@ def _config():
|
||||
|
||||
def add_from_file(filename, scope=None):
|
||||
"""Add updates to a config from a filename"""
|
||||
import spack.environment as ev
|
||||
|
||||
# Get file as config dict
|
||||
# Extract internal attributes, if we are dealing with an environment
|
||||
data = read_config_file(filename)
|
||||
if any(k in data for k in spack.schema.env.keys):
|
||||
data = ev.config_dict(data)
|
||||
if spack.schema.env.TOP_LEVEL_KEY in data:
|
||||
data = data[spack.schema.env.TOP_LEVEL_KEY]
|
||||
|
||||
# update all sections from config dict
|
||||
# We have to iterate on keys to keep overrides from the file
|
||||
@@ -1353,17 +1343,11 @@ def use_configuration(*scopes_or_paths):
|
||||
configuration = _config_from(scopes_or_paths)
|
||||
config.clear_caches(), configuration.clear_caches()
|
||||
|
||||
# Save and clear the current compiler cache
|
||||
saved_compiler_cache = spack.compilers._cache_config_file
|
||||
spack.compilers._cache_config_file = []
|
||||
|
||||
saved_config, config = config, configuration
|
||||
|
||||
try:
|
||||
yield configuration
|
||||
finally:
|
||||
# Restore previous config files
|
||||
spack.compilers._cache_config_file = saved_compiler_cache
|
||||
config = saved_config
|
||||
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ def validate(configuration_file):
|
||||
config = syaml.load(f)
|
||||
|
||||
# Ensure we have a "container" attribute with sensible defaults set
|
||||
env_dict = ev.config_dict(config)
|
||||
env_dict = config[ev.TOP_LEVEL_KEY]
|
||||
env_dict.setdefault(
|
||||
"container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"template": "container/fedora_38.dockerfile",
|
||||
"image": "docker.io/fedora:38"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora38",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -31,7 +31,7 @@
|
||||
"template": "container/fedora_37.dockerfile",
|
||||
"image": "docker.io/fedora:37"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora37",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -45,7 +45,7 @@
|
||||
"template": "container/rockylinux_9.dockerfile",
|
||||
"image": "docker.io/rockylinux:9"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/rockylinux9",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -59,7 +59,7 @@
|
||||
"template": "container/rockylinux_8.dockerfile",
|
||||
"image": "docker.io/rockylinux:8"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/rockylinux8",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -73,7 +73,7 @@
|
||||
"template": "container/almalinux_9.dockerfile",
|
||||
"image": "quay.io/almalinux/almalinux:9"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/almalinux9",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -87,7 +87,7 @@
|
||||
"template": "container/almalinux_8.dockerfile",
|
||||
"image": "quay.io/almalinux/almalinux:8"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/almalinux8",
|
||||
"build_tags": {
|
||||
"develop": "latest"
|
||||
@@ -101,7 +101,7 @@
|
||||
"template": "container/centos_stream.dockerfile",
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/centos-stream",
|
||||
"final": {
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
@@ -185,6 +185,16 @@
|
||||
"install": "apt-get -yqq install",
|
||||
"clean": "rm -rf /var/lib/apt/lists/*"
|
||||
},
|
||||
"dnf": {
|
||||
"update": "dnf update -y",
|
||||
"install": "dnf install -y",
|
||||
"clean": "rm -rf /var/cache/dnf && dnf clean all"
|
||||
},
|
||||
"dnf_epel": {
|
||||
"update": "dnf update -y && dnf install -y epel-release && dnf update -y",
|
||||
"install": "dnf install -y",
|
||||
"clean": "rm -rf /var/cache/dnf && dnf clean all"
|
||||
},
|
||||
"yum": {
|
||||
"update": "yum update -y && yum install -y epel-release && yum update -y",
|
||||
"install": "yum install -y",
|
||||
|
||||
@@ -50,7 +50,7 @@ def create(configuration, last_phase=None):
|
||||
configuration (dict): how to generate the current recipe
|
||||
last_phase (str): last phase to be printed or None to print them all
|
||||
"""
|
||||
name = ev.config_dict(configuration)["container"]["format"]
|
||||
name = configuration[ev.TOP_LEVEL_KEY]["container"]["format"]
|
||||
return _writer_factory[name](configuration, last_phase)
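A sketch of the configuration this now expects; TOP_LEVEL_KEY is "spack", and the nested values are only illustrative defaults, mirroring the ones used by validate() above:

configuration = {
    "spack": {
        "container": {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}},
        "specs": ["zlib"],
    }
}
# create(configuration) would then dispatch to _writer_factory["docker"]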
|
||||
|
||||
|
||||
@@ -138,7 +138,7 @@ class PathContext(tengine.Context):
|
||||
template_name: Optional[str] = None
|
||||
|
||||
def __init__(self, config, last_phase):
|
||||
self.config = ev.config_dict(config)
|
||||
self.config = config[ev.TOP_LEVEL_KEY]
|
||||
self.container_config = self.config["container"]
|
||||
|
||||
# Operating system tag as written in the configuration file
|
||||
|
||||
@@ -48,7 +48,8 @@ def translated_compiler_name(manifest_compiler_name):
|
||||
def compiler_from_entry(entry):
|
||||
compiler_name = translated_compiler_name(entry["name"])
|
||||
paths = entry["executables"]
|
||||
version = entry["version"]
|
||||
# to instantiate a compiler class we may need a concrete version:
|
||||
version = "={}".format(entry["version"])
|
||||
arch = entry["arch"]
|
||||
operating_system = arch["os"]
|
||||
target = arch["target"]
|
||||
@@ -163,7 +164,10 @@ def entries_to_specs(entries):
|
||||
continue
|
||||
parent_spec = spec_dict[entry["hash"]]
|
||||
dep_spec = spec_dict[dep_hash]
|
||||
parent_spec._add_dependency(dep_spec, deptypes=deptypes)
|
||||
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
|
||||
|
||||
for spec in spec_dict.values():
|
||||
spack.spec.reconstruct_virtuals_on_edges(spec)
|
||||
|
||||
return spec_dict
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@
|
||||
# DB version. This is stuck in the DB file to track changes in format.
|
||||
# Increment by one when the database format changes.
|
||||
# Versions before 5 were not integers.
|
||||
_db_version = vn.Version("6")
|
||||
_db_version = vn.Version("7")
|
||||
|
||||
# For any version combinations here, skip reindex when upgrading.
|
||||
# Reindexing can take considerable time and is not always necessary.
|
||||
@@ -72,6 +72,7 @@
|
||||
# version is saved to disk the first time the DB is written.
|
||||
(vn.Version("0.9.3"), vn.Version("5")),
|
||||
(vn.Version("5"), vn.Version("6")),
|
||||
(vn.Version("6"), vn.Version("7")),
|
||||
]
|
||||
|
||||
# Default timeout for spack database locks in seconds or None (no timeout).
|
||||
@@ -105,7 +106,11 @@
|
||||
|
||||
|
||||
def reader(version):
|
||||
reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
|
||||
reader_cls = {
|
||||
vn.Version("5"): spack.spec.SpecfileV1,
|
||||
vn.Version("6"): spack.spec.SpecfileV3,
|
||||
vn.Version("7"): spack.spec.SpecfileV4,
|
||||
}
|
||||
return reader_cls[version]
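A short sketch of how the mapping is meant to be used when reading an existing database; the db_version value here is hypothetical:

db_version = vn.Version("6")      # as recorded in the database file
spec_reader = reader(db_version)  # -> spack.spec.SpecfileV3 for a version-6 database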
|
||||
|
||||
|
||||
@@ -743,7 +748,9 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
spec_node_dict = spec_node_dict[spec.name]
|
||||
if "dependencies" in spec_node_dict:
|
||||
yaml_deps = spec_node_dict["dependencies"]
|
||||
for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
|
||||
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
|
||||
yaml_deps
|
||||
):
|
||||
# It is important that we always check upstream installations
|
||||
# in the same order, and that we always check the local
|
||||
# installation first: if a downstream Spack installs a package
|
||||
@@ -766,7 +773,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
tty.warn(msg)
|
||||
continue
|
||||
|
||||
spec._add_dependency(child, deptypes=dtypes)
|
||||
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
|
||||
|
||||
def _read_from_file(self, filename):
|
||||
"""Fill database from file, do not maintain old data.
|
||||
@@ -1172,7 +1179,7 @@ def _add(
|
||||
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
|
||||
dkey = dep.spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(dkey)
|
||||
new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
|
||||
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
|
||||
if not upstream:
|
||||
record.ref_count += 1
|
||||
|
||||
|
||||
@@ -337,6 +337,7 @@
|
||||
"""
|
||||
|
||||
from .environment import (
|
||||
TOP_LEVEL_KEY,
|
||||
Environment,
|
||||
SpackEnvironmentError,
|
||||
SpackEnvironmentViewError,
|
||||
@@ -345,7 +346,6 @@
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
config_dict,
|
||||
create,
|
||||
create_in_dir,
|
||||
deactivate,
|
||||
@@ -369,6 +369,7 @@
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"TOP_LEVEL_KEY",
|
||||
"Environment",
|
||||
"SpackEnvironmentError",
|
||||
"SpackEnvironmentViewError",
|
||||
@@ -377,7 +378,6 @@
|
||||
"active_environment",
|
||||
"all_environment_names",
|
||||
"all_environments",
|
||||
"config_dict",
|
||||
"create",
|
||||
"create_in_dir",
|
||||
"deactivate",
|
||||
|
||||
@@ -53,6 +53,7 @@
|
||||
import spack.version
|
||||
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.schema.env import TOP_LEVEL_KEY
|
||||
from spack.spec import Spec
|
||||
from spack.spec_list import InvalidSpecConstraintError, SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
@@ -124,7 +125,7 @@ def default_manifest_yaml():
|
||||
valid_environment_name_re = r"^\w[\w-]*$"
|
||||
|
||||
#: version of the lockfile format. Must increase monotonically.
|
||||
lockfile_format_version = 4
|
||||
lockfile_format_version = 5
|
||||
|
||||
|
||||
READER_CLS = {
|
||||
@@ -132,6 +133,7 @@ def default_manifest_yaml():
|
||||
2: spack.spec.SpecfileV1,
|
||||
3: spack.spec.SpecfileV2,
|
||||
4: spack.spec.SpecfileV3,
|
||||
5: spack.spec.SpecfileV4,
|
||||
}
|
||||
|
||||
|
||||
@@ -361,19 +363,6 @@ def ensure_env_root_path_exists():
|
||||
fs.mkdirp(env_root_path())
|
||||
|
||||
|
||||
def config_dict(yaml_data):
"""Get the configuration scope section out of a spack.yaml"""
# TODO (env:): Remove env: as a possible top level keyword in v0.21
|
||||
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
|
||||
if key == "env":
|
||||
msg = (
|
||||
"using 'env:' as a top-level attribute of a Spack environment is deprecated and "
|
||||
"will be removed in Spack v0.21. Please use 'spack:' instead."
|
||||
)
|
||||
warnings.warn(msg)
|
||||
return yaml_data[key]
|
||||
|
||||
|
||||
def all_environment_names():
|
||||
"""List the names of environments that currently exist."""
|
||||
# just return empty if the env path does not exist. A read-only
|
||||
@@ -821,8 +810,8 @@ def write_transaction(self):
|
||||
def _construct_state_from_manifest(self):
|
||||
"""Read manifest file and set up user specs."""
|
||||
self.spec_lists = collections.OrderedDict()
|
||||
|
||||
for item in config_dict(self.manifest).get("definitions", []):
|
||||
env_configuration = self.manifest[TOP_LEVEL_KEY]
|
||||
for item in env_configuration.get("definitions", []):
|
||||
entry = copy.deepcopy(item)
|
||||
when = _eval_conditional(entry.pop("when", "True"))
|
||||
assert len(entry) == 1
|
||||
@@ -834,13 +823,13 @@ def _construct_state_from_manifest(self):
|
||||
else:
|
||||
self.spec_lists[name] = user_specs
|
||||
|
||||
spec_list = config_dict(self.manifest).get(user_speclist_name, [])
|
||||
spec_list = env_configuration.get(user_speclist_name, [])
|
||||
user_specs = SpecList(
|
||||
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
|
||||
)
|
||||
self.spec_lists[user_speclist_name] = user_specs
|
||||
|
||||
enable_view = config_dict(self.manifest).get("view")
|
||||
enable_view = env_configuration.get("view")
|
||||
# enable_view can be boolean, string, or None
|
||||
if enable_view is True or enable_view is None:
|
||||
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
|
||||
@@ -855,14 +844,11 @@ def _construct_state_from_manifest(self):
|
||||
else:
|
||||
self.views = {}
|
||||
|
||||
# Retrieve the current concretization strategy
|
||||
configuration = config_dict(self.manifest)
|
||||
|
||||
# Retrieve unification scheme for the concretizer
|
||||
self.unify = spack.config.get("concretizer:unify", False)
|
||||
|
||||
# Retrieve dev-build packages:
|
||||
self.dev_specs = copy.deepcopy(configuration.get("develop", {}))
|
||||
self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
|
||||
for name, entry in self.dev_specs.items():
|
||||
# spec must include a concrete version
|
||||
assert Spec(entry["spec"]).versions.concrete_range_as_version
|
||||
@@ -982,7 +968,7 @@ def included_config_scopes(self):
|
||||
|
||||
# load config scopes added via 'include:', in reverse so that
|
||||
# highest-precedence scopes are last.
|
||||
includes = config_dict(self.manifest).get("include", [])
|
||||
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
|
||||
missing = []
|
||||
for i, config_path in enumerate(reversed(includes)):
|
||||
# allow paths to contain spack config/environment variables, etc.
|
||||
@@ -1075,10 +1061,7 @@ def env_file_config_scope(self):
|
||||
"""Get the configuration scope for the environment's manifest file."""
|
||||
config_name = self.env_file_config_scope_name()
|
||||
return spack.config.SingleFileScope(
|
||||
config_name,
|
||||
self.manifest_path,
|
||||
spack.schema.env.schema,
|
||||
[spack.config.first_existing(self.manifest, spack.schema.env.keys)],
|
||||
config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
|
||||
)
|
||||
|
||||
def config_scopes(self):
|
||||
@@ -1221,28 +1204,27 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
old_specs = set(self.user_specs)
|
||||
new_specs = set()
|
||||
for spec in matches:
|
||||
if spec in list_to_change:
|
||||
try:
|
||||
list_to_change.remove(spec)
|
||||
self.update_stale_references(list_name)
|
||||
new_specs = set(self.user_specs)
|
||||
except spack.spec_list.SpecListError:
|
||||
# define new specs list
|
||||
new_specs = set(self.user_specs)
|
||||
msg = f"Spec '{spec}' is part of a spec matrix and "
|
||||
msg += f"cannot be removed from list '{list_to_change}'."
|
||||
if force:
|
||||
msg += " It will be removed from the concrete specs."
|
||||
# Mock new specs, so we can remove this spec from concrete spec lists
|
||||
new_specs.remove(spec)
|
||||
tty.warn(msg)
|
||||
if spec not in list_to_change:
|
||||
continue
|
||||
try:
|
||||
list_to_change.remove(spec)
|
||||
self.update_stale_references(list_name)
|
||||
new_specs = set(self.user_specs)
|
||||
except spack.spec_list.SpecListError:
|
||||
# define new specs list
|
||||
new_specs = set(self.user_specs)
|
||||
msg = f"Spec '{spec}' is part of a spec matrix and "
|
||||
msg += f"cannot be removed from list '{list_to_change}'."
|
||||
if force:
|
||||
msg += " It will be removed from the concrete specs."
|
||||
# Mock new specs, so we can remove this spec from concrete spec lists
|
||||
new_specs.remove(spec)
|
||||
tty.warn(msg)
|
||||
else:
|
||||
if list_name == user_speclist_name:
|
||||
self.manifest.remove_user_spec(str(spec))
|
||||
else:
|
||||
if list_name == user_speclist_name:
|
||||
for user_spec in matches:
|
||||
self.manifest.remove_user_spec(str(user_spec))
|
||||
else:
|
||||
for user_spec in matches:
|
||||
self.manifest.remove_definition(str(user_spec), list_name=list_name)
|
||||
self.manifest.remove_definition(str(spec), list_name=list_name)
|
||||
|
||||
# If force, update stale concretized specs
|
||||
for spec in old_specs - new_specs:
|
||||
@@ -1352,6 +1334,10 @@ def concretize(self, force=False, tests=False):
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
# Remove concrete specs that no longer correlate to a user spec
|
||||
for spec in set(self.concretized_user_specs) - set(self.user_specs):
|
||||
self.deconcretize(spec)
|
||||
|
||||
# Pick the right concretization strategy
|
||||
if self.unify == "when_possible":
|
||||
return self._concretize_together_where_possible(tests=tests)
|
||||
@@ -1365,6 +1351,16 @@ def concretize(self, force=False, tests=False):
|
||||
msg = "concretization strategy not implemented [{0}]"
|
||||
raise SpackEnvironmentError(msg.format(self.unify))
|
||||
|
||||
def deconcretize(self, spec):
|
||||
# spec has to be a root of the environment
|
||||
index = self.concretized_user_specs.index(spec)
|
||||
dag_hash = self.concretized_order.pop(index)
|
||||
del self.concretized_user_specs[index]
|
||||
|
||||
# If this was the only user spec that concretized to this concrete spec, remove it
|
||||
if dag_hash not in self.concretized_order:
|
||||
del self.specs_by_hash[dag_hash]
|
||||
|
||||
def _get_specs_to_concretize(
self,
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
@@ -1402,6 +1398,10 @@ def _concretize_together_where_possible(
if not new_user_specs:
return []

old_concrete_to_abstract = {
concrete: abstract for (abstract, concrete) in self.concretized_specs()
}

self.concretized_user_specs = []
self.concretized_order = []
self.specs_by_hash = {}
@@ -1413,11 +1413,13 @@ def _concretize_together_where_possible(

result = []
for abstract, concrete in sorted(result_by_user_spec.items()):
# If the "abstract" spec is a concrete spec from the previous concretization
# translate it back to an abstract spec. Otherwise, keep the abstract spec
abstract = old_concrete_to_abstract.get(abstract, abstract)
if abstract in new_user_specs:
result.append((abstract, concrete))
else:
assert (abstract, concrete) in result
self._add_concrete_spec(abstract, concrete)

return result

def _concretize_together(
@@ -1436,7 +1438,7 @@ def _concretize_together(
self.specs_by_hash = {}

try:
concrete_specs = spack.concretize.concretize_specs_together(
concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
*specs_to_concretize, tests=tests
)
except spack.error.UnsatisfiableSpecError as e:
@@ -1455,11 +1457,14 @@ def _concretize_together(
)
raise

# zip truncates the longer list, which is exactly what we want here
concretized_specs = [x for x in zip(new_user_specs | kept_user_specs, concrete_specs)]
# set() | set() does not preserve ordering, even though sets are ordered
ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
for abstract, concrete in concretized_specs:
self._add_concrete_spec(abstract, concrete)
return concretized_specs

# zip truncates the longer list, which is exactly what we want here
return list(zip(new_user_specs, concrete_specs))
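The ordering comment above is the crux of the fix: `zip` pairs items purely by position, so the pairing of abstract and concrete specs is only correct if the user specs iterate in the order that produced `concrete_specs`, and a set union gives no such guarantee. A plain-Python illustration with hypothetical spec names:

```python
# Old code zipped a set union against the solver output; iteration order of a
# union is arbitrary, so abstract/concrete pairs could end up misaligned.
new_user_specs = {"hdf5", "zlib"}
kept_user_specs = {"cmake"}
unordered = new_user_specs | kept_user_specs  # no ordering guarantee

# Fix: build an explicit order, then zip against concrete specs produced in that order.
ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
concrete_specs = [f"<concrete {name}>" for name in ordered_user_specs]
concretized_specs = list(zip(ordered_user_specs, concrete_specs))
assert all(abstract in concrete for abstract, concrete in concretized_specs)
```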
def _concretize_separately(self, tests=False):
"""Concretization strategy that concretizes separately one
@@ -1544,12 +1549,13 @@ def _concretize_separately(self, tests=False):
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_deps = node.dependencies(deptype="test")
for test_dependency in test_deps:
test_edges = node.edges_to_dependencies(deptype="test")
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), deptypes="test"
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
)

results = [
@@ -2180,9 +2186,9 @@ def _read_lockfile_dict(self, d):
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], deptypes=deptypes
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
)

# Traverse the root specs one at a time in the order they appear.
@@ -2662,8 +2668,8 @@ def add_user_spec(self, user_spec: str) -> None:
Args:
user_spec: user spec to be appended
"""
config_dict(self.pristine_yaml_content).setdefault("specs", []).append(user_spec)
config_dict(self.yaml_content).setdefault("specs", []).append(user_spec)
self.pristine_configuration.setdefault("specs", []).append(user_spec)
self.configuration.setdefault("specs", []).append(user_spec)
self.changed = True

def remove_user_spec(self, user_spec: str) -> None:
@@ -2676,8 +2682,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
config_dict(self.pristine_yaml_content)["specs"].remove(user_spec)
config_dict(self.yaml_content)["specs"].remove(user_spec)
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
@@ -2694,8 +2700,8 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
SpackEnvironmentError: when the user spec cannot be overridden
"""
try:
config_dict(self.pristine_yaml_content)["specs"][idx] = user_spec
config_dict(self.yaml_content)["specs"][idx] = user_spec
self.pristine_configuration["specs"][idx] = user_spec
self.configuration["specs"][idx] = user_spec
except ValueError as e:
msg = f"cannot override {user_spec} from {self}"
raise SpackEnvironmentError(msg) from e
@@ -2711,14 +2717,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
Raises:
SpackEnvironmentError: if no valid definition exists already
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"

for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
item[list_name].append(user_spec)
break

config_dict(self.yaml_content)["definitions"][idx][list_name].append(user_spec)
self.configuration["definitions"][idx][list_name].append(user_spec)
self.changed = True

def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2732,7 +2738,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
SpackEnvironmentError: if the user spec cannot be removed from the list,
or the list does not exist
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = (
f"cannot remove {user_spec} from the '{list_name}' definition, "
f"no valid list exists"
@@ -2745,7 +2751,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass

config_dict(self.yaml_content)["definitions"][idx][list_name].remove(user_spec)
self.configuration["definitions"][idx][list_name].remove(user_spec)
self.changed = True

def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2760,7 +2766,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
Raises:
SpackEnvironmentError: if the user spec cannot be overridden
"""
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
defs = self.pristine_configuration.get("definitions", [])
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"

for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2771,7 +2777,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass

config_dict(self.yaml_content)["definitions"][idx][list_name][sub_index] = override
self.configuration["definitions"][idx][list_name][sub_index] = override
self.changed = True
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2803,24 +2809,24 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
True the default view is used for the environment, if False there's no view.
"""
if isinstance(view, dict):
config_dict(self.pristine_yaml_content)["view"][default_view_name].update(view)
config_dict(self.yaml_content)["view"][default_view_name].update(view)
self.pristine_configuration["view"][default_view_name].update(view)
self.configuration["view"][default_view_name].update(view)
self.changed = True
return

if not isinstance(view, bool):
view = str(view)

config_dict(self.pristine_yaml_content)["view"] = view
config_dict(self.yaml_content)["view"] = view
self.pristine_configuration["view"] = view
self.configuration["view"] = view
self.changed = True

def remove_default_view(self) -> None:
"""Removes the default view from the manifest file"""
view_data = config_dict(self.pristine_yaml_content).get("view")
view_data = self.pristine_configuration.get("view")
if isinstance(view_data, collections.abc.Mapping):
config_dict(self.pristine_yaml_content)["view"].pop(default_view_name)
config_dict(self.yaml_content)["view"].pop(default_view_name)
self.pristine_configuration["view"].pop(default_view_name)
self.configuration["view"].pop(default_view_name)
self.changed = True
return

@@ -2837,12 +2843,10 @@ def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
if entry["path"] == pkg_name:
entry.pop("path")

config_dict(self.pristine_yaml_content).setdefault("develop", {}).setdefault(
pkg_name, {}
).update(entry)
config_dict(self.yaml_content).setdefault("develop", {}).setdefault(pkg_name, {}).update(
self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
entry
)
self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
self.changed = True

def remove_develop_spec(self, pkg_name: str) -> None:
@@ -2855,11 +2859,11 @@ def remove_develop_spec(self, pkg_name: str) -> None:
SpackEnvironmentError: if there is nothing to remove
"""
try:
del config_dict(self.pristine_yaml_content)["develop"][pkg_name]
del self.pristine_configuration["develop"][pkg_name]
except KeyError as e:
msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
raise SpackEnvironmentError(msg) from e
del config_dict(self.yaml_content)["develop"][pkg_name]
del self.configuration["develop"][pkg_name]
self.changed = True

def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
@@ -2870,11 +2874,11 @@ def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
init_file_dir: directory with the "spack.yaml" used to initialize the environment.
"""
init_file_dir = pathlib.Path(init_file_dir).absolute()
for _, entry in config_dict(self.pristine_yaml_content).get("develop", {}).items():
for _, entry in self.pristine_configuration.get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)

for _, entry in config_dict(self.yaml_content).get("develop", {}).items():
for _, entry in self.configuration.get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
self.changed = True
@@ -2888,6 +2892,16 @@ def flush(self) -> None:
_write_yaml(self.pristine_yaml_content, f)
self.changed = False

@property
def pristine_configuration(self):
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
return self.pristine_yaml_content[TOP_LEVEL_KEY]

@property
def configuration(self):
"""Return the dictionaries in the YAML, without the top level attribute"""
return self.yaml_content[TOP_LEVEL_KEY]

def __len__(self):
return len(self.yaml_content)
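The two new properties centralize the repeated `config_dict(...)` lookups used by the manifest-editing methods above. A minimal sketch of the resulting pattern (a toy class, not the real `EnvironmentManifestFile`):

```python
import copy

TOP_LEVEL_KEY = "spack"

class ManifestSketch:
    """Minimal sketch of the two views kept by the manifest file object."""

    def __init__(self, yaml_content):
        self.pristine_yaml_content = yaml_content        # as read from disk
        self.yaml_content = copy.deepcopy(yaml_content)  # working copy

    @property
    def pristine_configuration(self):
        return self.pristine_yaml_content[TOP_LEVEL_KEY]

    @property
    def configuration(self):
        return self.yaml_content[TOP_LEVEL_KEY]

    def add_user_spec(self, user_spec):
        # Same shape as the add_user_spec change shown earlier in this diff
        self.pristine_configuration.setdefault("specs", []).append(user_spec)
        self.configuration.setdefault("specs", []).append(user_spec)
        self.changed = True

manifest = ManifestSketch({"spack": {"specs": []}})
manifest.add_user_spec("zlib@1.2.13")
assert manifest.pristine_configuration["specs"] == ["zlib@1.2.13"]
```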

@@ -42,6 +42,8 @@ def activate_header(env, shell, prompt=None):
cmds += 'set "SPACK_ENV=%s"\n' % env.path
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV=%s\n" % env.path
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -79,6 +81,8 @@ def deactivate_header(shell):
cmds += 'set "SPACK_ENV="\n'
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Remove-Item Env:SPACK_ENV"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"

@@ -544,6 +544,7 @@ def _static_edges(specs, deptype):
spack.spec.Spec(parent_name),
spack.spec.Spec(dependency_name),
deptypes=deptype,
virtuals=(),
)

@@ -215,6 +215,31 @@ def print_message(logger: LogType, msg: str, verbose: bool = False):
tty.info(msg, format="g")


def overall_status(current_status: "TestStatus", substatuses: List["TestStatus"]) -> "TestStatus":
"""Determine the overall status based on the current and associated sub status values.

Args:
current_status: current overall status, assumed to default to PASSED
substatuses: status of each test part or overall status of each test spec
Returns:
test status encompassing the main test and all subtests
"""
if current_status in [TestStatus.SKIPPED, TestStatus.NO_TESTS, TestStatus.FAILED]:
return current_status

skipped = 0
for status in substatuses:
if status == TestStatus.FAILED:
return status
elif status == TestStatus.SKIPPED:
skipped += 1

if skipped and skipped == len(substatuses):
return TestStatus.SKIPPED

return current_status
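A short usage sketch of the roll-up rules encoded above (assumes `overall_status` and `TestStatus` from this module are in scope; the statuses are illustrative):

```python
parent = TestStatus.PASSED

assert overall_status(parent, [TestStatus.PASSED, TestStatus.PASSED]) == TestStatus.PASSED
assert overall_status(parent, [TestStatus.PASSED, TestStatus.FAILED]) == TestStatus.FAILED
assert overall_status(parent, [TestStatus.SKIPPED, TestStatus.SKIPPED]) == TestStatus.SKIPPED
# A mix of skipped and passed parts falls back to the current overall status.
assert overall_status(parent, [TestStatus.SKIPPED, TestStatus.PASSED]) == TestStatus.PASSED
```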


class PackageTest:
"""The class that manages stand-alone (post-install) package tests."""

@@ -308,14 +333,12 @@ def status(self, name: str, status: "TestStatus", msg: Optional[str] = None):
# to start with the same name) may not have PASSED. This extra
# check is used to ensure the containing test part is not claiming
# to have passed when at least one subpart failed.
if status == TestStatus.PASSED:
for pname, substatus in self.test_parts.items():
if pname != part_name and pname.startswith(part_name):
if substatus == TestStatus.FAILED:
print(f"{substatus}: {part_name}{extra}")
self.test_parts[part_name] = substatus
self.counts[substatus] += 1
return
substatuses = []
for pname, substatus in self.test_parts.items():
if pname != part_name and pname.startswith(part_name):
substatuses.append(substatus)
if substatuses:
status = overall_status(status, substatuses)

print(f"{status}: {part_name}{extra}")
self.test_parts[part_name] = status
@@ -420,6 +443,26 @@ def summarize(self):
lines.append(f"{totals:=^80}")
return lines

def write_tested_status(self):
"""Write the overall status to the tested file.

If there are any test part failures, then the tests failed. If all test
parts are skipped, then the tests were skipped. If any tests passed
then the tests passed; otherwise, no tests were executed.
"""
status = TestStatus.NO_TESTS
if self.counts[TestStatus.FAILED] > 0:
status = TestStatus.FAILED
else:
skipped = self.counts[TestStatus.SKIPPED]
if skipped and self.parts() == skipped:
status = TestStatus.SKIPPED
elif self.counts[TestStatus.PASSED] > 0:
status = TestStatus.PASSED

with open(self.tested_file, "w") as f:
f.write(f"{status.value}\n")

@contextlib.contextmanager
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
@@ -654,8 +697,9 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
try:
tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError:
# some virtuals don't have a package
tests = []
# Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate.
continue

if len(tests) == 0:
tester.status(spec.name, TestStatus.NO_TESTS)
@@ -682,7 +726,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool

finally:
if tester.ran_tests():
fs.touch(tester.tested_file)
tester.write_tested_status()

# log one more test message to provide a completion timestamp
# for CDash reporting
@@ -889,20 +933,15 @@ def __call__(self, *args, **kwargs):
if remove_directory:
shutil.rmtree(test_dir)

tested = os.path.exists(self.tested_file_for_spec(spec))
if tested:
status = TestStatus.PASSED
else:
self.ensure_stage()
if spec.external and not externals:
status = TestStatus.SKIPPED
elif not spec.installed:
status = TestStatus.SKIPPED
else:
status = TestStatus.NO_TESTS
status = self.test_status(spec, externals)
self.counts[status] += 1

self.write_test_result(spec, status)

except SkipTest:
status = TestStatus.SKIPPED
self.counts[status] += 1
self.write_test_result(spec, TestStatus.SKIPPED)

except BaseException as exc:
status = TestStatus.FAILED
self.counts[status] += 1
@@ -939,6 +978,31 @@ def __call__(self, *args, **kwargs):
if failures:
raise TestSuiteFailure(failures)

def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
"""Determine the overall test results status for the spec.

Args:
spec: instance of the spec under test
externals: ``True`` if externals are to be tested, else ``False``

Returns:
the spec's test status if available or ``None``
"""
tests_status_file = self.tested_file_for_spec(spec)
if not os.path.exists(tests_status_file):
self.ensure_stage()
if spec.external and not externals:
status = TestStatus.SKIPPED
elif not spec.installed:
status = TestStatus.SKIPPED
else:
status = TestStatus.NO_TESTS
return status

with open(tests_status_file, "r") as f:
value = (f.read()).strip("\n")
return TestStatus(int(value)) if value else TestStatus.NO_TESTS
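`write_tested_status` and `test_status` now round-trip the enum through the tested file instead of treating the file's mere existence as "passed". A standalone sketch of that round trip (the enum and path below are hypothetical stand-ins, not the real `TestStatus` or `tested_file_for_spec`):

```python
import enum
import os
import tempfile

class Status(enum.Enum):
    NO_TESTS = 0
    SKIPPED = 1
    FAILED = 2
    PASSED = 3

path = os.path.join(tempfile.mkdtemp(), "results.txt")

# write side: persist the numeric value of the overall status
with open(path, "w") as f:
    f.write(f"{Status.PASSED.value}\n")

# read side: map it back, defaulting to NO_TESTS for an empty file
with open(path, "r") as f:
    value = f.read().strip("\n")
status = Status(int(value)) if value else Status.NO_TESTS
assert status == Status.PASSED
```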

def ensure_stage(self):
"""Ensure the test suite stage directory exists."""
if not os.path.exists(self.stage):

@@ -231,7 +231,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
)
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]

packages.append((dep.package, True))

@@ -40,7 +40,7 @@

import llnl.util.filesystem
import llnl.util.tty as tty
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, memoized

import spack.build_environment
import spack.config
@@ -170,17 +170,10 @@ def merge_config_rules(configuration, spec):
Returns:
dict: actions to be taken on the spec passed as an argument
"""

# Get the top-level configuration for the module type we are using
module_specific_configuration = copy.deepcopy(configuration)

# Construct a dictionary with the actions we need to perform on the spec
# passed as a parameter

# The keyword 'all' is always evaluated first, all the others are
# evaluated in order of appearance in the module file
spec_configuration = module_specific_configuration.pop("all", {})
for constraint, action in module_specific_configuration.items():
spec_configuration = copy.deepcopy(configuration.get("all", {}))
for constraint, action in configuration.items():
if spec.satisfies(constraint):
if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
@@ -200,14 +193,14 @@ def merge_config_rules(configuration, spec):
# configuration

# Hash length in module files
hash_length = module_specific_configuration.get("hash_length", 7)
hash_length = configuration.get("hash_length", 7)
spec_configuration["hash_length"] = hash_length

verbose = module_specific_configuration.get("verbose", False)
verbose = configuration.get("verbose", False)
spec_configuration["verbose"] = verbose

# module defaults per-package
defaults = module_specific_configuration.get("defaults", [])
defaults = configuration.get("defaults", [])
spec_configuration["defaults"] = defaults

return spec_configuration
@@ -400,7 +393,7 @@ class BaseConfiguration(object):
querying easier. It needs to be sub-classed for specific module types.
"""

default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}

def __init__(self, spec, module_set_name, explicit=None):
# Module where type(self) is defined
@@ -678,7 +671,14 @@ def configure_options(self):
# the configure option section
return None

def modification_needs_formatting(self, modification):
"""Returns True if environment modification entry needs to be formatted."""
return (
not isinstance(modification, (spack.util.environment.SetEnv)) or not modification.raw
)

@tengine.context_property
@memoized
def environment_modifications(self):
"""List of environment modifications to be processed."""
# Modifications guessed by inspecting the spec prefix
@@ -740,15 +740,29 @@ def environment_modifications(self):
_check_tokens_are_valid(x.name, message=msg)
# Transform them
x.name = spec.format(x.name, transform=transform)
try:
# Not every command has a value
x.value = spec.format(x.value)
except AttributeError:
pass
if self.modification_needs_formatting(x):
try:
# Not every command has a value
x.value = spec.format(x.value)
except AttributeError:
pass
x.name = str(x.name).replace("-", "_")

return [(type(x).__name__, x) for x in env if x.name not in exclude]

@tengine.context_property
def has_manpath_modifications(self):
"""True if MANPATH environment variable is modified."""
for modification_type, cmd in self.environment_modifications:
if not isinstance(
cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
):
continue
if cmd.name == "MANPATH":
return True
else:
return False

@tengine.context_property
def autoload(self):
"""List of modules that needs to be loaded automatically."""

@@ -7,7 +7,7 @@
import itertools
import os.path
import posixpath
from typing import Any, Dict
from typing import Any, Dict, List

import llnl.util.lang as lang

@@ -56,7 +56,7 @@ def make_context(spec, module_set_name, explicit):
return LmodContext(conf)


def guess_core_compilers(name, store=False):
def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
"""Guesses the list of core compilers installed in the system.

Args:
@@ -64,21 +64,19 @@ def guess_core_compilers(name, store=False):
modules.yaml configuration file

Returns:
List of core compilers, if found, or None
List of found core compilers
"""
core_compilers = []
for compiler_config in spack.compilers.all_compilers_config():
for compiler in spack.compilers.all_compilers():
try:
compiler = compiler_config["compiler"]
# A compiler is considered to be a core compiler if any of the
# C, C++ or Fortran compilers reside in a system directory
is_system_compiler = any(
os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
for x in compiler["paths"].values()
if x is not None
os.path.dirname(getattr(compiler, x, "")) in spack.util.environment.SYSTEM_DIRS
for x in ("cc", "cxx", "f77", "fc")
)
if is_system_compiler:
core_compilers.append(str(compiler["spec"]))
core_compilers.append(compiler.spec)
except (KeyError, TypeError, AttributeError):
continue

@@ -89,10 +87,10 @@ def guess_core_compilers(name, store=False):
modules_cfg = spack.config.get(
"modules:" + name, {}, scope=spack.config.default_modify_scope()
)
modules_cfg.setdefault("lmod", {})["core_compilers"] = core_compilers
modules_cfg.setdefault("lmod", {})["core_compilers"] = [str(x) for x in core_compilers]
spack.config.set("modules:" + name, modules_cfg, scope=spack.config.default_modify_scope())

return core_compilers or None
return core_compilers
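The rewritten detection above iterates over compiler objects rather than raw YAML entries. A simplified, self-contained sketch of the same "any C, C++ or Fortran binary under a system directory" test (the compiler object and SYSTEM_DIRS values below are hypothetical stand-ins, not Spack's real objects):

```python
import os

SYSTEM_DIRS = ["/usr/bin", "/bin", "/usr/local/bin"]

class StubCompiler:
    cc = "/usr/bin/gcc"
    cxx = "/usr/bin/g++"
    f77 = None
    fc = "/usr/bin/gfortran"
    spec = "gcc@11.3.0"

def is_system_compiler(compiler):
    # Core if any of the C, C++ or Fortran binaries lives in a system directory;
    # "or ''" guards against unset (None) compiler paths in this sketch.
    return any(
        os.path.dirname(getattr(compiler, attr, "") or "") in SYSTEM_DIRS
        for attr in ("cc", "cxx", "f77", "fc")
    )

assert is_system_compiler(StubCompiler())
```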


class LmodConfiguration(BaseConfiguration):
@@ -104,7 +102,7 @@ class LmodConfiguration(BaseConfiguration):
default_projections = {"all": posixpath.join("{name}", "{version}")}

@property
def core_compilers(self):
def core_compilers(self) -> List[spack.spec.CompilerSpec]:
"""Returns the list of "Core" compilers

Raises:
@@ -112,14 +110,18 @@ def core_compilers(self):
specified in the configuration file or the sequence
is empty
"""
value = configuration(self.name).get("core_compilers") or guess_core_compilers(
self.name, store=True
)
compilers = [
spack.spec.CompilerSpec(c) for c in configuration(self.name).get("core_compilers", [])
]

if not value:
if not compilers:
compilers = guess_core_compilers(self.name, store=True)

if not compilers:
msg = 'the key "core_compilers" must be set in modules.yaml'
raise CoreCompilersNotFoundError(msg)
return value

return compilers

@property
def core_specs(self):
@@ -132,6 +134,7 @@ def filter_hierarchy_specs(self):
return configuration(self.name).get("filter_hierarchy_specs", {})

@property
@lang.memoized
def hierarchy_tokens(self):
"""Returns the list of tokens that are part of the modulefile
hierarchy. 'compiler' is always present.
@@ -156,6 +159,7 @@ def hierarchy_tokens(self):
return tokens

@property
@lang.memoized
def requires(self):
"""Returns a dictionary mapping all the requirements of this spec
to the actual provider. 'compiler' is always present among the
@@ -222,6 +226,7 @@ def available(self):
return available

@property
@lang.memoized
def missing(self):
"""Returns the list of tokens that are not available."""
return [x for x in self.hierarchy_tokens if x not in self.available]
@@ -283,16 +288,18 @@ def token_to_path(self, name, value):

# If we are dealing with a core compiler, return 'Core'
core_compilers = self.conf.core_compilers
if name == "compiler" and str(value) in core_compilers:
if name == "compiler" and any(
spack.spec.CompilerSpec(value).satisfies(c) for c in core_compilers
):
return "Core"

# CompilerSpec does not have an hash, as we are not allowed to
# CompilerSpec does not have a hash, as we are not allowed to
# use different flavors of the same compiler
if name == "compiler":
return path_part_fmt.format(token=value)

# In case the hierarchy token refers to a virtual provider
# we need to append an hash to the version to distinguish
# we need to append a hash to the version to distinguish
# among flavors of the same library (e.g. openblas~openmp vs.
# openblas+openmp)
path = path_part_fmt.format(token=value)
@@ -313,6 +320,7 @@ def available_path_parts(self):
return parts

@property
@lang.memoized
def unlocked_paths(self):
"""Returns a dictionary mapping conditions to a list of unlocked
paths.
@@ -424,6 +432,7 @@ def missing(self):
return self.conf.missing

@tengine.context_property
@lang.memoized
def unlocked_paths(self):
"""Returns the list of paths that are unlocked unconditionally."""
layout = make_layout(self.spec, self.conf.name, self.conf.explicit)

@@ -108,6 +108,5 @@
# These are just here for editor support; they will be replaced when the build env
# is set up.
make = MakeExecutable("make", jobs=1)
gmake = MakeExecutable("gmake", jobs=1)
ninja = MakeExecutable("ninja", jobs=1)
configure = Executable(join_path(".", "configure"))

@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
)

# TODO: allow more than one active extendee.
@property
def extendee_spec(self):
"""
@@ -1246,7 +1247,6 @@ def extendee_spec(self):
if dep.name in self.extendees:
deps.append(dep)

# TODO: allow more than one active extendee.
if deps:
assert len(deps) == 1
return deps[0]
@@ -1256,7 +1256,6 @@ def extendee_spec(self):
if self.spec._concrete:
return None
else:
# TODO: do something sane here with more than one extendee
# If it's not concrete, then return the spec from the
# extends() directive since that is all we know so far.
spec_str, kwargs = next(iter(self.extendees.items()))
@@ -2017,7 +2016,8 @@ def test_title(purpose, test_name):
# stack instead of from traceback.
# The traceback is truncated here, so we can't use it to
# traverse the stack.
m = "\n".join(spack.build_environment.get_package_context(tb))
context = spack.build_environment.get_package_context(tb)
m = "\n".join(context) if context else ""

exc = e  # e is deleted after this block


@@ -291,7 +291,7 @@ def next_spec(
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

root_spec._add_dependency(dependency, deptypes=())
root_spec._add_dependency(dependency, deptypes=(), virtuals=())

else:
break

@@ -37,7 +37,9 @@


def slingshot_network():
return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")
return os.path.exists("/opt/cray/pe") and (
os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
)


def _target_name_from_craype_target_name(name):

@@ -292,8 +292,8 @@ def from_json(stream, repository):
index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.SpecfileV3.from_node_dict(vpkg),
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
spack.spec.SpecfileV4.from_node_dict(vpkg),
set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
),
)
return index

@@ -6,7 +6,6 @@
import itertools
import os
import re
import shutil
from collections import OrderedDict

import macholib.mach_o
@@ -356,13 +355,7 @@ def _set_elf_rpaths(target, rpaths):
# Join the paths using ':' as a separator
rpaths_str = ":".join(rpaths)

# If we're relocating patchelf itself, make a copy and use it
bak_path = None
if target.endswith("/bin/patchelf"):
bak_path = target + ".bak"
shutil.copy(target, bak_path)

patchelf, output = executable.Executable(bak_path or _patchelf()), None
patchelf, output = executable.Executable(_patchelf()), None
try:
# TODO: revisit the use of --force-rpath as it might be conditional
# TODO: if we want to support setting RUNPATH from binary packages
@@ -371,9 +364,6 @@ def _set_elf_rpaths(target, rpaths):
except executable.ProcessError as e:
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
tty.warn(msg.format(target, e))
finally:
if bak_path and os.path.exists(bak_path):
os.remove(bak_path)
return output


@@ -686,7 +676,7 @@ def is_relocatable(spec):
Raises:
ValueError: if the spec is not installed
"""
if not spec.install_status():
if not spec.installed:
raise ValueError("spec is not installed [{0}]".format(str(spec)))

if spec.external or spec.virtual:

@@ -1239,7 +1239,7 @@ def get_pkg_class(self, pkg_name):
try:
module = importlib.import_module(fullname)
except ImportError:
raise UnknownPackageError(pkg_name)
raise UnknownPackageError(fullname)
except Exception as e:
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
raise RepoError(msg) from e

@@ -134,23 +134,6 @@
core_shared_properties = union_dicts(
{
"pipeline-gen": pipeline_gen_schema,
"bootstrap": {
"type": "array",
"items": {
"anyOf": [
{"type": "string"},
{
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {
"name": {"type": "string"},
"compiler-agnostic": {"type": "boolean", "default": False},
},
},
]
},
},
"rebuild-index": {"type": "boolean"},
"broken-specs-url": {"type": "string"},
"broken-tests-packages": {"type": "array", "items": {"type": "string"}},
@@ -209,7 +192,7 @@ def update(data):
# Warn if deprecated section is still in the environment
ci_env = ev.active_environment()
if ci_env:
env_config = ev.config_dict(ci_env.manifest)
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
if "gitlab-ci" in env_config:
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")

@@ -15,8 +15,8 @@
import spack.schema.packages
import spack.schema.projections

#: legal first keys in the schema
keys = ("spack", "env")
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"

spec_list_schema = {
"type": "array",
@@ -47,8 +47,8 @@
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"patternProperties": {
"^env|spack$": {
"properties": {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,

@@ -861,9 +861,9 @@ class SpackSolverSetup(object):
def __init__(self, tests=False):
self.gen = None  # set by setup()

self.declared_versions = {}
self.possible_versions = {}
self.deprecated_versions = {}
self.declared_versions = collections.defaultdict(list)
self.possible_versions = collections.defaultdict(set)
self.deprecated_versions = collections.defaultdict(set)

self.possible_virtuals = None
self.possible_compilers = []
@@ -1669,9 +1669,34 @@ class Body(object):
if concrete_build_deps or dtype != "build":
clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

# Ensure Spack will not coconcretize this with another provider
# for the same virtual
for virtual in dep.package.virtuals_provided:
# TODO: We have to look up info from package.py here, but we'd
# TODO: like to avoid this entirely. We should not need to look
# TODO: up potentially wrong info if we have virtual edge info.
try:
try:
pkg = dep.package

except spack.repo.UnknownNamespaceError:
# Try to look up the package of the same name and use its
# providers. This is as good as we can do without edge info.
pkg_class = spack.repo.path.get_pkg_class(dep.name)
spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
pkg = pkg_class(spec)

virtuals = pkg.virtuals_provided

except spack.repo.UnknownPackageError:
# Skip virtual node constraints for renamed/deleted packages,
# so their binaries can still be installed.
# NOTE: with current specs (which lack edge attributes) this
# can allow concretizations with two providers, but it's unlikely.
continue

# Don't concretize with two providers of the same virtual.
# See above for exception for unknown packages.
# TODO: we will eventually record provider information on edges,
# TODO: which avoids the need for the package lookup above.
for virtual in virtuals:
clauses.append(fn.attr("virtual_node", virtual.name))
clauses.append(fn.provider(dep.name, virtual.name))

@@ -1697,10 +1722,6 @@ class Body(object):

def build_version_dict(self, possible_pkgs):
"""Declare any versions in specs not declared in packages."""
self.declared_versions = collections.defaultdict(list)
self.possible_versions = collections.defaultdict(set)
self.deprecated_versions = collections.defaultdict(set)

packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
for pkg_name in possible_pkgs:
@@ -1734,13 +1755,47 @@ def key_fn(item):
# All the preferred version from packages.yaml, versions in external
# specs will be computed later
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
for idx, v in enumerate(version_preferences):
# v can be a string so force it into an actual version for comparisons
ver = vn.Version(v)
version_defs = []
pkg_class = spack.repo.path.get_pkg_class(pkg_name)
for vstr in version_preferences:
v = vn.ver(vstr)
if isinstance(v, vn.GitVersion):
version_defs.append(v)
else:
satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
# Amongst all defined versions satisfying this specific
# preference, the highest-numbered version is the
# most-preferred: therefore sort satisfying versions
# from greatest to least
version_defs.extend(sorted(satisfying_versions, reverse=True))

for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
self.declared_versions[pkg_name].append(
DeclaredVersion(version=ver, idx=idx, origin=Provenance.PACKAGES_YAML)
DeclaredVersion(version=vdef, idx=weight, origin=Provenance.PACKAGES_YAML)
)
self.possible_versions[pkg_name].add(ver)
self.possible_versions[pkg_name].add(vdef)

def _check_for_defined_matching_versions(self, pkg_class, v):
"""Given a version specification (which may be a concrete version,
range, etc.), determine if any package.py version declarations
or externals define a version which satisfies it.

This is primarily for determining whether a version request (e.g.
version preferences, which should not themselves define versions)
refers to a defined version.

This function raises an exception if no satisfying versions are
found.
"""
pkg_name = pkg_class.name
satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
if not satisfying_versions:
raise spack.config.ConfigError(
"Preference for version {0} does not match any version"
" defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
)
return satisfying_versions
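A hedged sketch of how a range-style preference is expanded by the helper above. It assumes a package class whose ``versions`` mapping is keyed by Spack ``Version`` objects and uses ``spack.version`` the same way the diff does (`vn`); the helper name and return shape below are illustrative, not part of the real solver:

```python
import spack.version as vn

def expand_preference(pkg_class, preference_str, possible_versions=()):
    # Mirrors _check_for_defined_matching_versions plus the sorting in key_fn:
    # collect every defined version that satisfies the preference, then order
    # them from greatest to least so the enumeration index can act as a weight.
    v = vn.ver(preference_str)
    satisfying = [x for x in pkg_class.versions if x.satisfies(v)]
    satisfying.extend(x for x in possible_versions if x.satisfies(v))
    if not satisfying:
        raise ValueError(f"no defined version of {pkg_class.name} satisfies {preference_str}")
    return list(enumerate(sorted(satisfying, reverse=True)))

# e.g. expand_preference(pkg_class, "1.2:1.9") might yield
# [(0, Version('1.2.1')), (1, Version('1.2.0'))] for a package defining 1.2.0 and 1.2.1
```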

def add_concrete_versions_from_specs(self, specs, origin):
"""Add concrete versions to possible versions from lists of CLI/dev specs."""
@@ -2173,14 +2228,6 @@ def setup(self, driver, specs, reuse=None):
# get possible compilers
self.possible_compilers = self.generate_possible_compilers(specs)

# traverse all specs and packages to build dict of possible versions
self.build_version_dict(possible)
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)

req_version_specs = _get_versioned_specs_from_pkg_requirements()
self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)

self.gen.h1("Concrete input spec definitions")
self.define_concrete_input_specs(specs, possible)

@@ -2208,6 +2255,14 @@ def setup(self, driver, specs, reuse=None):
self.provider_requirements()
self.external_packages()

# traverse all specs and packages to build dict of possible versions
self.build_version_dict(possible)
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)

req_version_specs = self._get_versioned_specs_from_pkg_requirements()
self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)

self.gen.h1("Package Constraints")
for pkg in sorted(self.pkgs):
self.gen.h2("Package rules: %s" % pkg)
@@ -2254,55 +2309,78 @@ def literal_specs(self, specs):
if self.concretize_everything:
self.gen.fact(fn.concretize_everything())

def _get_versioned_specs_from_pkg_requirements(self):
"""If package requirements mention versions that are not mentioned
elsewhere, then we need to collect those to mark them as possible
versions.
"""
req_version_specs = list()
config = spack.config.get("packages")
for pkg_name, d in config.items():
if pkg_name == "all":
continue
if "require" in d:
req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
return req_version_specs

def _get_versioned_specs_from_pkg_requirements():
"""If package requirements mention versions that are not mentioned
elsewhere, then we need to collect those to mark them as possible
versions.
"""
req_version_specs = list()
config = spack.config.get("packages")
for pkg_name, d in config.items():
if pkg_name == "all":
continue
if "require" in d:
req_version_specs.extend(_specs_from_requires(pkg_name, d["require"]))
return req_version_specs

def _specs_from_requires(pkg_name, section):
if isinstance(section, str):
spec = spack.spec.Spec(section)
if not spec.name:
spec.name = pkg_name
extracted_specs = [spec]
else:
spec_strs = []
for spec_group in section:
if isinstance(spec_group, str):
spec_strs.append(spec_group)
else:
# Otherwise it is an object. The object can contain a single
# "spec" constraint, or a list of them with "any_of" or
# "one_of" policy.
if "spec" in spec_group:
new_constraints = [spec_group["spec"]]
else:
key = "one_of" if "one_of" in spec_group else "any_of"
new_constraints = spec_group[key]
spec_strs.extend(new_constraints)

extracted_specs = []
for spec_str in spec_strs:
spec = spack.spec.Spec(spec_str)
def _specs_from_requires(self, pkg_name, section):
"""Collect specs from requirements which define versions (i.e. those that
have a concrete version). Requirements can define *new* versions if
they are included as part of an equivalence (hash=number) but not
otherwise.
"""
if isinstance(section, str):
spec = spack.spec.Spec(section)
if not spec.name:
spec.name = pkg_name
extracted_specs.append(spec)
extracted_specs = [spec]
else:
spec_strs = []
for spec_group in section:
if isinstance(spec_group, str):
spec_strs.append(spec_group)
else:
# Otherwise it is an object. The object can contain a single
# "spec" constraint, or a list of them with "any_of" or
# "one_of" policy.
if "spec" in spec_group:
new_constraints = [spec_group["spec"]]
else:
key = "one_of" if "one_of" in spec_group else "any_of"
new_constraints = spec_group[key]
spec_strs.extend(new_constraints)

version_specs = [x for x in extracted_specs if x.versions.concrete]
for spec in version_specs:
spec.attach_git_version_lookup()
return version_specs
extracted_specs = []
for spec_str in spec_strs:
spec = spack.spec.Spec(spec_str)
if not spec.name:
spec.name = pkg_name
extracted_specs.append(spec)

version_specs = []
for spec in extracted_specs:
if spec.versions.concrete:
# Note: this includes git versions
version_specs.append(spec)
continue

# Prefer spec's name if it exists, in case the spec is
# requiring a specific implementation inside of a virtual section
# e.g. packages:mpi:require:openmpi@4.0.1
pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
satisfying_versions = self._check_for_defined_matching_versions(
pkg_class, spec.versions
)

# Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
# will end up here
ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
version_specs.extend(vspecs)

for spec in version_specs:
spec.attach_git_version_lookup()
return version_specs

class SpecBuilder(object):
@@ -2422,10 +2500,15 @@ def depends_on(self, pkg, dep, type):
assert len(dependencies) < 2, msg

if not dependencies:
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
else:
# TODO: This assumes that each solve unifies dependencies
dependencies[0].add_type(type)
dependencies[0].update_deptypes(deptypes=(type,))

def virtual_on_edge(self, pkg, provider, virtual):
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
assert len(dependencies) == 1
dependencies[0].update_virtuals((virtual,))

def reorder_flags(self):
"""Order compiler flags on specs in predefined order.
@@ -2503,6 +2586,8 @@ def sort_fn(function_tuple):
return (-2, 0)
elif name == "external_spec_selected":
return (0, 0)  # note out of order so this goes last
elif name == "virtual_on_edge":
return (1, 0)
else:
return (-1, 0)

@@ -2758,12 +2843,13 @@ class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
"""

def __init__(self, provided, conflicts):
indented = ["  %s\n" % conflict for conflict in conflicts]
error_msg = "".join(indented)
msg = "Spack concretizer internal error. Please submit a bug report"
msg += "\n    Please include the command, environment if applicable,"
msg += "\n    and the following error message."
msg = "\n    %s is unsatisfiable, errors are:\n%s" % (provided, error_msg)
msg = (
"Spack concretizer internal error. Please submit a bug report and include the "
"command, environment if applicable and the following error message."
f"\n    {provided} is unsatisfiable, errors are:"
)

msg += "".join([f"\n    {conflict}" for conflict in conflicts])

super(spack.error.UnsatisfiableSpecError, self).__init__(msg)


@@ -300,6 +300,11 @@ attr("depends_on", Package, Provider, Type)
provider(Provider, Virtual),
not external(Package).

attr("virtual_on_edge", Package, Provider, Virtual)
:- dependency_holds(Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).

% dependencies on virtuals also imply that the virtual is a virtual node
attr("virtual_node", Virtual)
:- dependency_holds(Package, Virtual, Type),

@@ -50,6 +50,7 @@
"""
import collections
import collections.abc
import enum
import io
import itertools
import os
@@ -170,7 +171,17 @@
)

#: specfile format version. Must increase monotonically
SPECFILE_FORMAT_VERSION = 3
SPECFILE_FORMAT_VERSION = 4


# InstallStatus is used to map install statuses to symbols for display
# Options are artificially disjoint for display purposes
class InstallStatus(enum.Enum):
installed = "@g{[+]}  "
upstream = "@g{[^]}  "
external = "@g{[e]}  "
absent = "@K{ - }  "
missing = "@r{[-]}  "


def colorize_spec(spec):
@@ -679,6 +690,16 @@ def from_dict(d):
d = d["compiler"]
return CompilerSpec(d["name"], vn.VersionList.from_dict(d))

@property
def display_str(self):
"""Equivalent to {compiler.name}{@compiler.version} for Specs, without extra
@= for readability."""
if self.concrete:
return f"{self.name}@{self.version}"
elif self.versions != vn.any_version:
return f"{self.name}@{self.versions}"
return self.name

def __str__(self):
out = self.name
if self.versions and self.versions != vn.any_version:
@@ -704,47 +725,81 @@ class DependencySpec:
parent: starting node of the edge
spec: ending node of the edge.
deptypes: list of strings, representing dependency relationships.
virtuals: virtual packages provided from child to parent node.
"""

__slots__ = "parent", "spec", "deptypes"
__slots__ = "parent", "spec", "parameters"

def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
def __init__(
self,
parent: "Spec",
spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
self.parent = parent
self.spec = spec
self.deptypes = dp.canonical_deptype(deptypes)
self.parameters = {
"deptypes": dp.canonical_deptype(deptypes),
"virtuals": tuple(sorted(set(virtuals))),
}

def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
deptypes = set(deptypes)
deptypes.update(self.deptypes)
deptypes = tuple(sorted(deptypes))
changed = self.deptypes != deptypes
@property
def deptypes(self) -> Tuple[str, ...]:
return self.parameters["deptypes"]

self.deptypes = deptypes
return changed
@property
def virtuals(self) -> Tuple[str, ...]:
return self.parameters["virtuals"]

def _update_edge_multivalued_property(
self, property_name: str, value: Tuple[str, ...]
) -> bool:
current = self.parameters[property_name]
update = set(current) | set(value)
update = tuple(sorted(update))
changed = current != update

if not changed:
return False

self.parameters[property_name] = update
return True

def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
"""Update the current dependency types"""
return self._update_edge_multivalued_property("deptypes", deptypes)

def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals"""
return self._update_edge_multivalued_property("virtuals", virtuals)

def copy(self) -> "DependencySpec":
return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)

def add_type(self, type: dp.DependencyArgument):
self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
"""Return a copy of this edge"""
return DependencySpec(
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
)

def _cmp_iter(self):
yield self.parent.name if self.parent else None
yield self.spec.name if self.spec else None
yield self.deptypes
yield self.virtuals

def __str__(self) -> str:
return "%s %s--> %s" % (
self.parent.name if self.parent else None,
self.deptypes,
self.spec.name if self.spec else None,
)
parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"

def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals

def flip(self) -> "DependencySpec":
return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
"""Flip the dependency, and drop virtual information"""
return DependencySpec(
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
)
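With edge attributes now stored in a `parameters` dict, the edge exposes `deptypes` and `virtuals` as read-only views and merges updates through `update_*`. A hedged usage sketch; it assumes `parent` and `child` are existing `spack.spec.Spec` objects and only exercises the methods introduced above:

```python
edge = DependencySpec(parent, child, deptypes=("build", "link"), virtuals=("mpi",))

assert edge.deptypes == ("build", "link")
assert edge.virtuals == ("mpi",)

# update_* methods merge new values (sorted, deduplicated) and report whether anything changed
assert edge.update_virtuals(("lapack",)) is True   # virtuals become ("lapack", "mpi")
assert edge.update_virtuals(("mpi",)) is False     # already present, no change

# flip() swaps direction and deliberately drops virtual information
assert edge.flip().virtuals == ()
```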
|
||||
|
||||
class CompilerFlag(str):
|
||||
@@ -1565,10 +1620,12 @@ def _set_compiler(self, compiler):
|
||||
)
|
||||
self.compiler = compiler
|
||||
|
||||
def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
def _add_dependency(
|
||||
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
|
||||
):
|
||||
"""Called by the parser to add another spec as a dependency."""
|
||||
if spec.name not in self._dependencies or not spec.name:
|
||||
self.add_dependency_edge(spec, deptypes=deptypes)
|
||||
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
|
||||
return
|
||||
|
||||
# Keep the intersection of constraints when a dependency is added
|
||||
@@ -1586,34 +1643,58 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
|
||||
)
|
||||
|
||||
def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
|
||||
def add_dependency_edge(
|
||||
self,
|
||||
dependency_spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
virtuals: Tuple[str, ...],
|
||||
):
|
||||
"""Add a dependency edge to this spec.
|
||||
|
||||
Args:
|
||||
dependency_spec: spec of the dependency
|
||||
deptypes: dependency types for this edge
|
||||
virtuals: virtuals provided by this edge
|
||||
"""
|
||||
deptypes = dp.canonical_deptype(deptypes)
|
||||
|
||||
# Check if we need to update edges that are already present
|
||||
selected = self._dependencies.select(child=dependency_spec.name)
|
||||
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
if any(d in edge.deptypes for d in deptypes):
msg = (
'cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already'
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these types {str(deptypes)}"
)
)
raise spack.error.SpecError(msg.format(edge, deptypes))

if any(v in edge.virtuals for v in virtuals):
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these virtuals {str(virtuals)}"
)
)

if has_errors:
raise spack.error.SpecError(msg, "\n".join(details))

for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.add_type(deptypes)
edge.update_deptypes(deptypes=deptypes)
edge.update_virtuals(virtuals=virtuals)
return

edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
|
||||
|
||||
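For illustration, a minimal usage sketch of the new edge API. The package names are hypothetical, and it assumes the edge stores its virtuals as a tuple:

    parent = Spec("hypre")   # hypothetical consumer of the "mpi" virtual
    child = Spec("mpich")    # hypothetical provider
    parent.add_dependency_edge(child, deptypes=("build", "link"), virtuals=("mpi",))
    edge = parent.edges_to_dependencies(name="mpich")[0]
    assert edge.virtuals == ("mpi",)
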
@@ -1730,14 +1811,14 @@ def traverse_edges(self, **kwargs):
|
||||
def short_spec(self):
|
||||
"""Returns a version of the spec with the dependencies hashed
|
||||
instead of completely enumerated."""
|
||||
spec_format = "{name}{@version}{%compiler}"
|
||||
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||
spec_format += "{variants}{arch=architecture}{/hash:7}"
|
||||
return self.format(spec_format)
|
||||
|
||||
@property
|
||||
def cshort_spec(self):
|
||||
"""Returns an auto-colorized version of ``self.short_spec``."""
|
||||
spec_format = "{name}{@version}{%compiler}"
|
||||
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
|
||||
spec_format += "{variants}{arch=architecture}{/hash:7}"
|
||||
return self.cformat(spec_format)
|
||||
|
||||
@@ -1886,12 +1967,12 @@ def lookup_hash(self):
|
||||
for node in self.traverse(root=False):
|
||||
if node.abstract_hash:
|
||||
new = node._lookup_hash()
|
||||
spec._add_dependency(new, deptypes=())
|
||||
spec._add_dependency(new, deptypes=(), virtuals=())
|
||||
|
||||
# reattach nodes that were not otherwise satisfied by new dependencies
|
||||
for node in self.traverse(root=False):
|
||||
if not any(n._satisfies(node) for n in spec.traverse()):
|
||||
spec._add_dependency(node.copy(), deptypes=())
|
||||
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2026,8 +2107,14 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
name_tuple = ("name", name)
|
||||
for dspec in edges_for_name:
|
||||
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
|
||||
type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
|
||||
deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
|
||||
parameters_tuple = (
|
||||
"parameters",
|
||||
syaml.syaml_dict(
|
||||
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
|
||||
),
|
||||
)
|
||||
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
|
||||
deps_list.append(syaml.syaml_dict(ordered_entries))
|
||||
d["dependencies"] = deps_list
|
||||
|
||||
# Name is included in case this is replacing a virtual.
|
||||
@@ -2351,7 +2438,7 @@ def spec_and_dependency_types(s):
|
||||
dag_node, dependency_types = spec_and_dependency_types(s)
|
||||
|
||||
dependency_spec = spec_builder({dag_node: s_dependencies})
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types)
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2369,8 +2456,10 @@ def from_dict(data):
|
||||
spec = SpecfileV1.load(data)
|
||||
elif int(data["spec"]["_meta"]["version"]) == 2:
|
||||
spec = SpecfileV2.load(data)
|
||||
else:
|
||||
elif int(data["spec"]["_meta"]["version"]) == 3:
|
||||
spec = SpecfileV3.load(data)
|
||||
else:
|
||||
spec = SpecfileV4.load(data)
|
||||
|
||||
# Any git version should
|
||||
for s in spec.traverse():
|
||||
@@ -2519,6 +2608,7 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
|
||||
def _replace_with(self, concrete):
|
||||
"""Replace this virtual spec with a concrete spec."""
|
||||
assert self.virtual
|
||||
virtuals = (self.name,)
|
||||
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
|
||||
dependent = dep_spec.parent
|
||||
deptypes = dep_spec.deptypes
|
||||
@@ -2529,7 +2619,11 @@ def _replace_with(self, concrete):
|
||||
|
||||
# add the replacement, unless it is already a dep of dependent.
|
||||
if concrete.name not in dependent._dependencies:
|
||||
dependent._add_dependency(concrete, deptypes=deptypes)
|
||||
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
|
||||
else:
|
||||
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
|
||||
virtuals=virtuals
|
||||
)
|
||||
|
||||
def _expand_virtual_packages(self, concretizer):
|
||||
"""Find virtual packages in this spec, replace them with providers,
|
||||
@@ -2789,11 +2883,11 @@ def inject_patches_variant(root):
|
||||
# Also record all patches required on dependencies by
|
||||
# depends_on(..., patch=...)
|
||||
for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
|
||||
pkg_deps = dspec.parent.package_class.dependencies
|
||||
if dspec.spec.name not in pkg_deps:
|
||||
if dspec.spec.concrete:
|
||||
continue
|
||||
|
||||
if dspec.spec.concrete:
|
||||
pkg_deps = dspec.parent.package_class.dependencies
|
||||
if dspec.spec.name not in pkg_deps:
|
||||
continue
|
||||
|
||||
patches = []
|
||||
@@ -3170,7 +3264,9 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
|
||||
# If it's a virtual dependency, try to find an existing
|
||||
# provider in the spec, and merge that.
|
||||
virtuals = ()
|
||||
if spack.repo.path.is_virtual_safe(dep.name):
|
||||
virtuals = (dep.name,)
|
||||
visited.add(dep.name)
|
||||
provider = self._find_provider(dep, provider_index)
|
||||
if provider:
|
||||
@@ -3226,7 +3322,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
# Add merged spec to my deps and recurse
|
||||
spec_dependency = spec_deps[dep.name]
|
||||
if dep.name not in self._dependencies:
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type)
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)
|
||||
|
||||
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
|
||||
return changed
|
||||
@@ -3563,15 +3659,20 @@ def _constrain_dependencies(self, other):
|
||||
changed |= edges_from_name[0].update_deptypes(
|
||||
other._dependencies[name][0].deptypes
|
||||
)
|
||||
changed |= edges_from_name[0].update_virtuals(
|
||||
other._dependencies[name][0].virtuals
|
||||
)
|
||||
|
||||
# Update with additional constraints from other spec
|
||||
# operate on direct dependencies only, because a concrete dep
|
||||
# represented by hash may have structure that needs to be preserved
|
||||
for name in other.direct_dep_difference(self):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
dep_copy = dep_spec_copy.spec
|
||||
deptypes = dep_spec_copy.deptypes
|
||||
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
|
||||
self._add_dependency(
|
||||
dep_spec_copy.spec.copy(),
|
||||
deptypes=dep_spec_copy.deptypes,
|
||||
virtuals=dep_spec_copy.virtuals,
|
||||
)
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
@@ -3955,7 +4056,7 @@ def spid(spec):
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
|
||||
|
||||
new_specs[spid(edge.parent)].add_dependency_edge(
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
def copy(self, deps=True, **kwargs):
|
||||
@@ -4323,7 +4424,7 @@ def write_attribute(spec, attribute, color):
|
||||
|
||||
if callable(current):
|
||||
raise SpecFormatStringError("Attempted to format callable object")
|
||||
if not current:
|
||||
if current is None:
|
||||
# We're not printing anything
|
||||
return
|
||||
|
||||
@@ -4391,12 +4492,20 @@ def __str__(self):
|
||||
def install_status(self):
|
||||
"""Helper for tree to print DB install status."""
|
||||
if not self.concrete:
|
||||
return None
|
||||
try:
|
||||
record = spack.store.db.get_record(self)
|
||||
return record.installed
|
||||
except KeyError:
|
||||
return None
|
||||
return InstallStatus.absent
|
||||
|
||||
if self.external:
|
||||
return InstallStatus.external
|
||||
|
||||
upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
|
||||
if not record:
|
||||
return InstallStatus.absent
|
||||
elif upstream and record.installed:
|
||||
return InstallStatus.upstream
|
||||
elif record.installed:
|
||||
return InstallStatus.installed
|
||||
else:
|
||||
return InstallStatus.missing
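A hedged usage note, assuming a concrete Spec bound to the name `spec`: the helper now returns an InstallStatus value rather than a bare boolean, so callers can distinguish upstream installs from local and missing ones.

    status = spec.install_status()
    if status == InstallStatus.upstream:
        print(f"{spec.name} is installed in an upstream database")
    elif status == InstallStatus.missing:
        print(f"{spec.name} should be installed but its record is missing")
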
|
||||
|
||||
def _installed_explicitly(self):
|
||||
"""Helper for tree to print DB install status."""
|
||||
@@ -4410,7 +4519,10 @@ def _installed_explicitly(self):
|
||||
|
||||
def tree(self, **kwargs):
|
||||
"""Prints out this spec and its dependencies, tree-formatted
|
||||
with indentation."""
|
||||
with indentation.
|
||||
|
||||
Status function may either output a boolean or an InstallStatus
|
||||
"""
|
||||
color = kwargs.pop("color", clr.get_color_when())
|
||||
depth = kwargs.pop("depth", False)
|
||||
hashes = kwargs.pop("hashes", False)
|
||||
@@ -4442,14 +4554,12 @@ def tree(self, **kwargs):
|
||||
|
||||
if status_fn:
|
||||
status = status_fn(node)
|
||||
if node.installed_upstream:
|
||||
out += clr.colorize("@g{[^]} ", color=color)
|
||||
elif status is None:
|
||||
out += clr.colorize("@K{ - } ", color=color) # !installed
|
||||
if status in list(InstallStatus):
|
||||
out += clr.colorize(status.value, color=color)
|
||||
elif status:
|
||||
out += clr.colorize("@g{[+]} ", color=color) # installed
|
||||
out += clr.colorize("@g{[+]} ", color=color)
|
||||
else:
|
||||
out += clr.colorize("@r{[-]} ", color=color) # missing
|
||||
out += clr.colorize("@r{[-]} ", color=color)
|
||||
|
||||
if hashes:
|
||||
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
|
||||
@@ -4625,12 +4735,16 @@ def from_self(name, transitive):
|
||||
if name in self_nodes:
|
||||
for edge in self[name].edges_to_dependencies():
|
||||
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
|
||||
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in self_nodes for dep in self[name]._dependencies):
|
||||
nodes[name].build_spec = self[name].build_spec
|
||||
else:
|
||||
for edge in other[name].edges_to_dependencies():
|
||||
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in other_nodes for dep in other[name]._dependencies):
|
||||
nodes[name].build_spec = other[name].build_spec
|
||||
|
||||
@@ -4720,11 +4834,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
|
||||
# Update with additional constraints from other spec
|
||||
for name in current_spec_constraint.direct_dep_difference(merged_spec):
|
||||
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
|
||||
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
|
||||
merged_spec._add_dependency(
|
||||
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
return merged_spec
|
||||
|
||||
|
||||
def reconstruct_virtuals_on_edges(spec):
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.

Args:
spec: spec on which we want to reconstruct virtuals
"""
# Collect all possible virtuals
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue

# Assume all incoming edges to provider are marked with virtuals=
for vspec in possible_virtuals:
try:
provider = spec[vspec]
except KeyError:
# Virtual not in the DAG
continue

for edge in provider.edges_from_dependents():
edge.update_virtuals([vspec])
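A minimal sketch of the intended effect, assuming a DAG in which a hypothetical "mpich" node provides the "mpi" virtual, has at least one dependent, and `spec` was read from an old-format record whose edges carried no virtuals:

    reconstruct_virtuals_on_edges(spec)
    provider_edge = spec["mpich"].edges_from_dependents()[0]
    assert "mpi" in provider_edge.virtuals
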
class SpecfileReaderBase:
|
||||
@classmethod
|
||||
def from_node_dict(cls, node):
|
||||
@@ -4808,7 +4951,7 @@ def _load(cls, data):
|
||||
|
||||
# Pass 0: Determine hash type
|
||||
for node in nodes:
|
||||
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
|
||||
for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
|
||||
any_deps = True
|
||||
if dhash_type:
|
||||
hash_type = dhash_type
|
||||
@@ -4839,8 +4982,10 @@ def _load(cls, data):
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
|
||||
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(
|
||||
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
|
||||
)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
node_spec._build_spec = hash_dict[bhash]["node_spec"]
|
||||
@@ -4874,9 +5019,10 @@ def load(cls, data):
|
||||
for node in nodes:
|
||||
# get dependency dict from the node.
|
||||
name, data = cls.name_and_data(node)
|
||||
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
|
||||
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
|
||||
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4905,18 +5051,20 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt["type"]
|
||||
hash_type = h.name
|
||||
virtuals = []
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type
|
||||
yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)
|
||||
|
||||
|
||||
class SpecfileV2(SpecfileReaderBase):
|
||||
@classmethod
|
||||
def load(cls, data):
|
||||
result = cls._load(data)
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4950,7 +5098,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type))
|
||||
result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
@@ -4970,6 +5118,20 @@ class SpecfileV3(SpecfileV2):
|
||||
pass
|
||||
|
||||
|
||||
class SpecfileV4(SpecfileV2):
@classmethod
def extract_info_from_dep(cls, elt, hash):
dep_hash = elt[hash.name]
deptypes = elt["parameters"]["deptypes"]
hash_type = hash.name
virtuals = elt["parameters"]["virtuals"]
return dep_hash, deptypes, hash_type, virtuals

@classmethod
def load(cls, data):
return cls._load(data)
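For context, a hedged illustration of the v4 dependency entry this reader expects; the hash value and package names are invented, and the authoritative layout is whatever to_node_dict above writes:

    dep_entry = {
        "name": "mpich",
        "hash": "abcdef1234567890",
        "parameters": {"deptypes": ["build", "link"], "virtuals": ["mpi"]},
    }
    dep_hash, deptypes, hash_type, virtuals = SpecfileV4.extract_info_from_dep(
        dep_entry, ht.dag_hash
    )
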
class LazySpecCache(collections.defaultdict):
|
||||
"""Cache for Specs that uses a spec_like as key, and computes lazily
|
||||
the corresponding value ``Spec(spec_like``.
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import itertools
|
||||
import textwrap
|
||||
from typing import List
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
@@ -66,17 +66,17 @@ def to_dict(self):
|
||||
return dict(d)
|
||||
|
||||
|
||||
def make_environment(dirs=None):
"""Returns an configured environment for template rendering."""
@llnl.util.lang.memoized
def make_environment(dirs: Optional[Tuple[str, ...]] = None):
"""Returns a configured environment for template rendering."""
# Import at this scope to avoid slowing Spack startup down
import jinja2

if dirs is None:
# Default directories where to search for templates
builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
extensions = spack.extensions.get_template_dirs()
dirs = [canonicalize_path(d) for d in itertools.chain(builtins, extensions)]

# avoid importing this at the top level as it's used infrequently and
# slows down startup a bit.
import jinja2
dirs = tuple(canonicalize_path(d) for d in itertools.chain(builtins, extensions))

# Loader for the templates
loader = jinja2.FileSystemLoader(dirs)
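One consequence of memoizing this function, sketched under the assumption that the decorator caches on argument value: repeated calls with the same (or default) directories hand back the same jinja2 environment instead of rebuilding it, which is why `dirs` is normalized to a hashable tuple above.

    env = make_environment()          # default template dirs from config
    assert make_environment() is env  # served from the memoization cache
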
|
||||
|
||||
@@ -115,9 +115,6 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
|
||||
|
||||
spack.config.config, old_config = cfg, spack.config.config
|
||||
spack.config.config.set("repos", [spack.paths.mock_packages_path])
|
||||
# This is essential, otherwise the cache will create weird side effects
|
||||
# that will compromise subsequent tests if compilers.yaml is modified
|
||||
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
|
||||
njobs = spack.config.get("config:build_jobs")
|
||||
if not njobs:
|
||||
spack.config.set("config:build_jobs", 4, scope="user")
|
||||
@@ -204,12 +201,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
install_cmd("--no-cache", sy_spec.name)
|
||||
|
||||
# Create a buildcache
|
||||
buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
|
||||
buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
|
||||
# Test force overwrite create buildcache (-f option)
|
||||
buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
|
||||
buildcache_cmd("push", "-auf", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
# List the buildcaches in the mirror
|
||||
buildcache_cmd("list", "-alv")
|
||||
|
||||
@@ -217,13 +214,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Test installing from build caches
|
||||
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# This gives a warning that the spec is already installed
|
||||
buildcache_cmd("install", "-au", cspec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-afu", cspec.name)
|
||||
buildcache_cmd("install", "-fu", cspec.name)
|
||||
|
||||
buildcache_cmd("keys", "-f")
|
||||
buildcache_cmd("list")
|
||||
@@ -249,35 +246,10 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):
|
||||
|
||||
# Install some packages with dependent packages
|
||||
# test install in non-default install path scheme
|
||||
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# Test force install in non-default install path scheme
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.nomockstage
|
||||
@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
|
||||
def test_relative_rpaths_create_default_layout(mirror_dir):
|
||||
"""
|
||||
Test the creation and installation of buildcaches with relative
|
||||
rpaths into the default directory layout scheme.
|
||||
"""
|
||||
|
||||
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
|
||||
|
||||
# Install 'corge' without using a cache
|
||||
install_cmd("--no-cache", cspec.name)
|
||||
|
||||
# Create build cache with relative rpaths
|
||||
buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@@ -294,19 +266,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
|
||||
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
|
||||
|
||||
# Install buildcache created with relativized rpaths
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# This gives a warning that the spec is already installed
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Install build cache
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*args)
|
||||
@@ -323,7 +295,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
|
||||
cspec = Spec("corge").concretized()
|
||||
|
||||
# Test install in non-default install path scheme and relative path
|
||||
buildcache_cmd("install", "-auf", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
def test_push_and_fetch_keys(mock_gnupghome):
|
||||
@@ -404,7 +376,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
install_cmd(s.name)
|
||||
|
||||
# Put installed package in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)
|
||||
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url)
|
||||
|
||||
@@ -433,8 +405,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
install_cmd("--no-cache", s.name)
|
||||
|
||||
# Create a buildcache and update index
|
||||
buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
|
||||
buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
|
||||
buildcache_cmd("update-index", mirror_dir.strpath)
|
||||
|
||||
# Check package and dependency in buildcache
|
||||
cache_list = buildcache_cmd("list", "--allarch")
|
||||
@@ -446,7 +418,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
os.remove(*libelf_files)
|
||||
|
||||
# Update index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
|
||||
buildcache_cmd("update-index", mirror_dir.strpath)
|
||||
|
||||
with spack.config.override("config:binary_index_ttl", 0):
|
||||
# Check dependency not in buildcache
|
||||
@@ -522,10 +494,10 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
install_cmd("--no-cache", old_spec.name)
|
||||
|
||||
# Create a buildcache with the installed spec.
|
||||
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
|
||||
buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)
|
||||
|
||||
# Need to force an update of the buildcache index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
buildcache_cmd("update-index", mirror_dir)
|
||||
|
||||
# Uninstall the original package.
|
||||
uninstall_cmd("-y", old_spec_hash_str)
|
||||
@@ -541,7 +513,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
assert new_spec.dag_hash() == old_spec.dag_hash()
|
||||
|
||||
# Install package from buildcache
|
||||
buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
|
||||
buildcache_cmd("install", "-u", "-f", new_spec.name)
|
||||
|
||||
# Continue blowing away caches
|
||||
bindist.clear_spec_cache()
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.cmd.create
|
||||
@@ -12,8 +10,6 @@
|
||||
import spack.util.executable
|
||||
import spack.util.url as url_util
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
|
||||
|
||||
@pytest.fixture(
|
||||
scope="function",
|
||||
|
||||
@@ -173,7 +173,7 @@ def wrapper_environment(working_env):
|
||||
SPACK_DTAGS_TO_ADD="--disable-new-dtags",
|
||||
SPACK_DTAGS_TO_STRIP="--enable-new-dtags",
|
||||
SPACK_COMPILER_FLAGS_KEEP="",
|
||||
SPACK_COMPILER_FLAGS_REPLACE="-Werror*",
|
||||
SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
|
||||
):
|
||||
yield
|
||||
|
||||
@@ -278,8 +278,8 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
|
||||
ld,
|
||||
test_args,
|
||||
["ld"]
|
||||
+ spack_ldflags
|
||||
+ test_include_paths
|
||||
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
|
||||
+ test_library_paths
|
||||
+ ["--disable-new-dtags"]
|
||||
+ test_rpaths
|
||||
@@ -293,10 +293,10 @@ def test_cpp_flags(wrapper_environment, wrapper_flags):
|
||||
cpp,
|
||||
test_args,
|
||||
["cpp"]
|
||||
+ spack_cppflags
|
||||
+ test_include_paths
|
||||
+ test_library_paths
|
||||
+ test_args_without_paths,
|
||||
+ test_args_without_paths
|
||||
+ spack_cppflags,
|
||||
)
|
||||
|
||||
|
||||
@@ -306,10 +306,14 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
|
||||
test_args,
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ test_include_paths
|
||||
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
|
||||
+ test_library_paths
|
||||
+ ["-Wl,--disable-new-dtags"]
|
||||
+ test_wl_rpaths
|
||||
+ test_args_without_paths
|
||||
+ spack_cppflags
|
||||
+ spack_cflags
|
||||
+ spack_ldflags
|
||||
+ common_compile_args
|
||||
+ spack_ldlibs,
|
||||
)
|
||||
|
||||
@@ -320,10 +324,13 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
|
||||
test_args,
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ test_include_paths
|
||||
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
|
||||
+ test_library_paths
|
||||
+ ["-Wl,--disable-new-dtags"]
|
||||
+ test_wl_rpaths
|
||||
+ test_args_without_paths
|
||||
+ spack_cppflags
|
||||
+ spack_cxxflags
|
||||
+ spack_ldflags
|
||||
+ common_compile_args
|
||||
+ spack_ldlibs,
|
||||
)
|
||||
|
||||
@@ -334,10 +341,14 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
|
||||
test_args,
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ test_include_paths
|
||||
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
|
||||
+ test_library_paths
|
||||
+ ["-Wl,--disable-new-dtags"]
|
||||
+ test_wl_rpaths
|
||||
+ test_args_without_paths
|
||||
+ spack_fflags
|
||||
+ spack_cppflags
|
||||
+ spack_ldflags
|
||||
+ common_compile_args
|
||||
+ spack_ldlibs,
|
||||
)
|
||||
|
||||
|
||||
@@ -46,31 +46,6 @@ def test_import_signing_key(mock_gnupghome):
|
||||
ci.import_signing_key(signing_key)
|
||||
|
||||
|
||||
def test_configure_compilers(mutable_config):
|
||||
def assert_missing(config):
|
||||
assert (
|
||||
"install_missing_compilers" not in config
|
||||
or config["install_missing_compilers"] is False
|
||||
)
|
||||
|
||||
def assert_present(config):
|
||||
assert (
|
||||
"install_missing_compilers" in config and config["install_missing_compilers"] is True
|
||||
)
|
||||
|
||||
original_config = spack.config.get("config")
|
||||
assert_missing(original_config)
|
||||
|
||||
ci.configure_compilers("FIND_ANY", scope="site")
|
||||
|
||||
second_config = spack.config.get("config")
|
||||
assert_missing(second_config)
|
||||
|
||||
ci.configure_compilers("INSTALL_MISSING")
|
||||
last_config = spack.config.get("config")
|
||||
assert_present(last_config)
|
||||
|
||||
|
||||
class FakeWebResponder(object):
|
||||
def __init__(self, response_code=200, content_to_read=[]):
|
||||
self._resp_code = response_code
|
||||
@@ -248,7 +223,7 @@ def test_ci_workarounds():
|
||||
fake_root_spec = "x" * 544
|
||||
fake_spack_ref = "x" * 40
|
||||
|
||||
common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}
|
||||
common_variables = {"SPACK_IS_PR_PIPELINE": "False"}
|
||||
|
||||
common_before_script = [
|
||||
'git clone "https://github.com/spack/spack"',
|
||||
@@ -291,7 +266,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
|
||||
def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
|
||||
result = {
|
||||
"stage": "stage-rebuild-index",
|
||||
"script": "spack buildcache update-index --mirror-url s3://mirror",
|
||||
"script": "spack buildcache update-index s3://mirror",
|
||||
"tags": ["tag-0", "tag-1"],
|
||||
"image": {"name": "spack/centos7", "entrypoint": [""]},
|
||||
"after_script": ['rm -rf "./spack"'],
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -34,12 +35,18 @@ def test_build_env_requires_a_spec(args):
|
||||
_out_file = "env.out"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("shell", ["pwsh", "bat"] if sys.platform == "win32" else ["bash"])
|
||||
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
|
||||
def test_dump(tmpdir):
|
||||
def test_dump(shell_as, shell, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
build_env("--dump", _out_file, "zlib")
|
||||
with open(_out_file) as f:
|
||||
assert any(line.startswith("PATH=") for line in f.readlines())
|
||||
if shell == "pwsh":
|
||||
assert any(line.startswith("$Env:PATH") for line in f.readlines())
|
||||
elif shell == "bat":
|
||||
assert any(line.startswith('set "PATH=') for line in f.readlines())
|
||||
else:
|
||||
assert any(line.startswith("PATH=") for line in f.readlines())
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
|
||||
|
||||
@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
|
||||
pkg = "trivial-install-test-package"
|
||||
install(pkg)
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
|
||||
buildcache("push", "--unsigned", str(tmpdir), pkg)
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
|
||||
add(pkg)
|
||||
install()
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned")
|
||||
buildcache("push", "--unsigned", str(tmpdir))
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
|
||||
"""Ensure that buildcache create fails when given no args or
|
||||
environment."""
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned")
|
||||
buildcache("push", "--unsigned", str(tmpdir))
|
||||
|
||||
|
||||
def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
|
||||
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monk
|
||||
|
||||
tmpdir.chmod(0)
|
||||
with pytest.raises(OSError) as error:
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
|
||||
buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
|
||||
assert error.value.errno == errno.EACCES
|
||||
tmpdir.chmod(0o700)
|
||||
|
||||
@@ -159,11 +159,11 @@ def test_update_key_index(
|
||||
# Put installed package in the buildcache, which, because we're signing
|
||||
# it, should result in the public key getting pushed to the buildcache
|
||||
# as well.
|
||||
buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
|
||||
buildcache("push", "-a", mirror_dir.strpath, s.name)
|
||||
|
||||
# Now make sure that when we pass the "--keys" argument to update-index
|
||||
# it causes the index to get updated.
|
||||
buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
|
||||
buildcache("update-index", "--keys", mirror_dir.strpath)
|
||||
|
||||
key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
|
||||
|
||||
@@ -213,27 +213,25 @@ def verify_mirror_contents():
|
||||
# Install a package and put it in the buildcache
|
||||
s = Spec(out_env_pkg).concretized()
|
||||
install(s.name)
|
||||
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
|
||||
buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)
|
||||
|
||||
env("create", "test")
|
||||
with ev.read("test"):
|
||||
add(in_env_pkg)
|
||||
install()
|
||||
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
|
||||
buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)
|
||||
|
||||
# Now run the spack buildcache sync command with all the various options
|
||||
# for specifying mirrors
|
||||
|
||||
# Use urls to specify mirrors
|
||||
buildcache(
|
||||
"sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
|
||||
)
|
||||
buildcache("sync", src_mirror_url, dest_mirror_url)
|
||||
|
||||
verify_mirror_contents()
|
||||
shutil.rmtree(dest_mirror_dir)
|
||||
|
||||
# Use local directory paths to specify fs locations
|
||||
buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
|
||||
buildcache("sync", src_mirror_dir, dest_mirror_dir)
|
||||
|
||||
verify_mirror_contents()
|
||||
shutil.rmtree(dest_mirror_dir)
|
||||
@@ -242,7 +240,7 @@ def verify_mirror_contents():
|
||||
mirror("add", "src", src_mirror_url)
|
||||
mirror("add", "dest", dest_mirror_url)
|
||||
|
||||
buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
|
||||
buildcache("sync", "src", "dest")
|
||||
|
||||
verify_mirror_contents()
|
||||
|
||||
@@ -260,7 +258,7 @@ def test_buildcache_create_install(
|
||||
pkg = "trivial-install-test-package"
|
||||
install(pkg)
|
||||
|
||||
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
|
||||
buildcache("push", "--unsigned", str(tmpdir), pkg)
|
||||
|
||||
spec = Spec(pkg).concretized()
|
||||
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
|
||||
@@ -324,12 +322,12 @@ def fake_push(node, push_url, options):
|
||||
|
||||
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)
|
||||
|
||||
buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
|
||||
buildcache_create_args = ["create", "--unsigned"]
|
||||
|
||||
if things_to_install != "":
|
||||
buildcache_create_args.extend(["--only", things_to_install])
|
||||
|
||||
buildcache_create_args.extend([slash_hash])
|
||||
buildcache_create_args.extend([str(tmpdir), slash_hash])
|
||||
|
||||
buildcache(*buildcache_create_args)
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
import spack
|
||||
import spack.binary_distribution
|
||||
import spack.ci as ci
|
||||
import spack.compilers as compilers
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
@@ -30,7 +29,7 @@
|
||||
from spack.schema.buildcache_spec import schema as specfile_schema
|
||||
from spack.schema.ci import schema as ci_schema
|
||||
from spack.schema.database_index import schema as db_idx_schema
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
from spack.spec import Spec
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
config_cmd = spack.main.SpackCommand("config")
|
||||
@@ -163,8 +162,6 @@ def test_ci_generate_with_env(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- cmake@3.4.3
|
||||
- old-gcc-pkgs:
|
||||
- archive-files
|
||||
- callpath
|
||||
@@ -179,9 +176,6 @@ def test_ci_generate_with_env(
|
||||
mirrors:
|
||||
some-mirror: {0}
|
||||
ci:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -221,16 +215,10 @@ def test_ci_generate_with_env(
|
||||
with open(outputfile) as f:
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
found_spec = False
|
||||
for ci_key in yaml_contents.keys():
|
||||
if "(bootstrap)" in ci_key:
|
||||
found_spec = True
|
||||
assert "cmake" in ci_key
|
||||
assert found_spec
|
||||
assert "stages" in yaml_contents
|
||||
assert len(yaml_contents["stages"]) == 6
|
||||
assert len(yaml_contents["stages"]) == 5
|
||||
assert yaml_contents["stages"][0] == "stage-0"
|
||||
assert yaml_contents["stages"][5] == "stage-rebuild-index"
|
||||
assert yaml_contents["stages"][4] == "stage-rebuild-index"
|
||||
|
||||
assert "rebuild-index" in yaml_contents
|
||||
rebuild_job = yaml_contents["rebuild-index"]
|
||||
@@ -244,155 +232,6 @@ def test_ci_generate_with_env(
|
||||
assert artifacts_root == "jobs_scratch_dir"
|
||||
|
||||
|
||||
def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
|
||||
"""Validate the needs graph in the generate CI"""
|
||||
|
||||
# TODO: Fix the logic to catch errors where expected packages/needs are not
|
||||
# found.
|
||||
for job_name, job_def in yaml_contents.items():
|
||||
for needs_def_name, needs_list in needs_graph.items():
|
||||
if job_name.startswith(needs_def_name):
|
||||
# check job needs against the expected needs definition
|
||||
j_needs = job_def["needs"]
|
||||
assert all(
|
||||
[
|
||||
job_needs["job"][: job_needs["job"].index("/")] in needs_list
|
||||
for job_needs in j_needs
|
||||
]
|
||||
)
|
||||
assert all(
|
||||
[nl in [n["job"][: n["job"].index("/")] for n in j_needs] for nl in needs_list]
|
||||
)
|
||||
assert all([job_needs["artifacts"] == artifacts for job_needs in j_needs])
|
||||
break
|
||||
|
||||
|
||||
def test_ci_generate_bootstrap_gcc(
|
||||
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
|
||||
):
|
||||
"""Test that we can bootstrap a compiler and use it as the
|
||||
compiler for a spec in the environment"""
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- gcc@3.0
|
||||
specs:
|
||||
- dyninst%gcc@=3.0
|
||||
mirrors:
|
||||
some-mirror: https://my.fake.mirror
|
||||
ci:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
- arch=test-debian6-x86_64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- match:
|
||||
- arch=test-debian6-aarch64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- any-job:
|
||||
tags:
|
||||
- donotcare
|
||||
"""
|
||||
)
|
||||
|
||||
needs_graph = {
|
||||
"(bootstrap) conflict": [],
|
||||
"(bootstrap) gcc": ["(bootstrap) conflict"],
|
||||
"(specs) libelf": ["(bootstrap) gcc"],
|
||||
"(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
|
||||
"(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
|
||||
}
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
|
||||
|
||||
with ev.read("test"):
|
||||
ci_cmd("generate", "--output-file", outputfile)
|
||||
|
||||
with open(outputfile) as f:
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
_validate_needs_graph(yaml_contents, needs_graph, False)
|
||||
|
||||
|
||||
def test_ci_generate_bootstrap_artifacts_buildcache(
|
||||
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
|
||||
):
|
||||
"""Test that we can bootstrap a compiler when artifacts buildcache
|
||||
is turned on"""
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- gcc@3.0
|
||||
specs:
|
||||
- dyninst%gcc@=3.0
|
||||
mirrors:
|
||||
some-mirror: https://my.fake.mirror
|
||||
ci:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
- arch=test-debian6-x86_64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- match:
|
||||
- arch=test-debian6-aarch64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- any-job:
|
||||
tags:
|
||||
- donotcare
|
||||
enable-artifacts-buildcache: True
|
||||
"""
|
||||
)
|
||||
|
||||
needs_graph = {
|
||||
"(bootstrap) conflict": [],
|
||||
"(bootstrap) gcc": ["(bootstrap) conflict"],
|
||||
"(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
|
||||
"(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
|
||||
"(specs) dyninst": [
|
||||
"(bootstrap) gcc",
|
||||
"(bootstrap) conflict",
|
||||
"(specs) libelf",
|
||||
"(specs) libdwarf",
|
||||
],
|
||||
}
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
|
||||
|
||||
with ev.read("test"):
|
||||
ci_cmd("generate", "--output-file", outputfile)
|
||||
|
||||
with open(outputfile) as f:
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
_validate_needs_graph(yaml_contents, needs_graph, True)
|
||||
|
||||
|
||||
def test_ci_generate_with_env_missing_section(
|
||||
tmpdir,
|
||||
working_env,
|
||||
@@ -889,7 +728,7 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
|
||||
"SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
|
||||
"SPACK_JOB_SPEC_PKG_NAME": pkg_name,
|
||||
"SPACK_COMPILER_ACTION": "NONE",
|
||||
"SPACK_CDASH_BUILD_NAME": "(specs) {0}".format(pkg_name),
|
||||
"SPACK_CDASH_BUILD_NAME": pkg_name,
|
||||
"SPACK_REMOTE_MIRROR_URL": rebuild_env.mirror_url,
|
||||
"SPACK_PIPELINE_TYPE": "spack_protected_branch",
|
||||
"CI_JOB_URL": rebuild_env.ci_job_url,
|
||||
@@ -1055,7 +894,7 @@ def test_ci_nothing_to_rebuild(
|
||||
)
|
||||
|
||||
install_cmd("archive-files")
|
||||
buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
|
||||
buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")
|
||||
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
@@ -1155,8 +994,8 @@ def test_ci_generate_mirror_override(
|
||||
second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
|
||||
with ev.read("test"):
|
||||
install_cmd()
|
||||
buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
|
||||
buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
|
||||
buildcache_cmd("push", "-u", mirror_url, "patchelf")
|
||||
buildcache_cmd("update-index", mirror_url, output=str)
|
||||
|
||||
# This generate should not trigger a rebuild of patchelf, since it's in
|
||||
# the main mirror referenced in the environment.
|
||||
@@ -1283,7 +1122,7 @@ def test_push_mirror_contents(
|
||||
found_spec_job = False
|
||||
|
||||
for ci_key in yaml_contents.keys():
|
||||
if "(specs) patchelf" in ci_key:
|
||||
if "patchelf" in ci_key:
|
||||
the_elt = yaml_contents[ci_key]
|
||||
assert "variables" in the_elt
|
||||
job_vars = the_elt["variables"]
|
||||
@@ -1297,7 +1136,7 @@ def test_push_mirror_contents(
|
||||
mirror_cmd("rm", "test-ci")
|
||||
|
||||
# Test generating buildcache index while we have bin mirror
|
||||
buildcache_cmd("update-index", "--mirror-url", mirror_url)
|
||||
buildcache_cmd("update-index", mirror_url)
|
||||
index_path = os.path.join(buildcache_path, "index.json")
|
||||
with open(index_path) as idx_fd:
|
||||
index_object = json.load(idx_fd)
|
||||
@@ -1457,7 +1296,7 @@ def test_ci_generate_override_runner_attrs(
|
||||
assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"
|
||||
|
||||
for ci_key in yaml_contents.keys():
|
||||
if "(specs) a" in ci_key:
|
||||
if ci_key.startswith("a"):
|
||||
# Make sure a's attributes override variables, and all the
|
||||
# scripts. Also, make sure the 'toplevel' tag doesn't
|
||||
# appear twice, but that a's specific extra tag does appear
|
||||
@@ -1477,7 +1316,7 @@ def test_ci_generate_override_runner_attrs(
|
||||
assert the_elt["script"][0] == "custom main step"
|
||||
assert len(the_elt["after_script"]) == 1
|
||||
assert the_elt["after_script"][0] == "custom post step one"
|
||||
if "(specs) dependency-install" in ci_key:
|
||||
if "dependency-install" in ci_key:
|
||||
# Since the dependency-install match omits any
|
||||
# runner-attributes, make sure it inherited all the
|
||||
# top-level attributes.
|
||||
@@ -1495,7 +1334,7 @@ def test_ci_generate_override_runner_attrs(
|
||||
assert the_elt["script"][0] == "main step"
|
||||
assert len(the_elt["after_script"]) == 1
|
||||
assert the_elt["after_script"][0] == "post step one"
|
||||
if "(specs) flatten-deps" in ci_key:
|
||||
if "flatten-deps" in ci_key:
|
||||
# The flatten-deps match specifies that we keep the two
|
||||
# top level variables, but add a third specifc one. It
|
||||
# also adds a custom tag which should be combined with
|
||||
@@ -1554,9 +1393,10 @@ def test_ci_generate_with_workarounds(
|
||||
yaml_contents = syaml.load(contents)
|
||||
|
||||
found_one = False
|
||||
non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]
|
||||
|
||||
for ci_key in yaml_contents.keys():
|
||||
if ci_key.startswith("(specs) "):
|
||||
if ci_key not in non_rebuild_keys:
|
||||
found_one = True
|
||||
job_obj = yaml_contents[ci_key]
|
||||
assert "needs" not in job_obj
|
||||
@@ -1613,7 +1453,7 @@ def test_ci_rebuild_index(
|
||||
ypfd.write(spec_json)
|
||||
|
||||
install_cmd("--add", "--keep-stage", "-f", json_path)
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
|
||||
buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
|
||||
ci_cmd("rebuild-index")
|
||||
|
||||
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
|
||||
@@ -1623,140 +1463,6 @@ def test_ci_rebuild_index(
|
||||
jsonschema.validate(index_object, db_idx_schema)
|
||||
|
||||
|
||||
def test_ci_generate_bootstrap_prune_dag(
|
||||
install_mockery_mutable_config,
|
||||
mock_packages,
|
||||
mock_fetch,
|
||||
mock_archive,
|
||||
mutable_config,
|
||||
monkeypatch,
|
||||
tmpdir,
|
||||
mutable_mock_env_path,
|
||||
ci_base_environment,
|
||||
):
|
||||
"""Test compiler bootstrapping with DAG pruning. Specifically, make
|
||||
sure that if we detect the bootstrapped compiler needs to be rebuilt,
|
||||
we ensure the spec we want to build with that compiler is scheduled
|
||||
for rebuild as well."""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join("mirror_dir")
|
||||
mirror_url = "file://{0}".format(mirror_dir.strpath)
|
||||
|
||||
# Install a compiler, because we want to put it in a buildcache
|
||||
install_cmd("gcc@=12.2.0%gcc@10.2.1")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
|
||||
|
||||
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
|
||||
spack.config.set("config:install_missing_compilers", True)
|
||||
assert CompilerSpec("gcc@=12.2.0") not in compilers.all_compiler_specs()
|
||||
|
||||
# Configure the mirror where we put that buildcache w/ the compiler
|
||||
mirror_cmd("add", "test-mirror", mirror_url)
|
||||
|
||||
install_cmd("--no-check-signature", "b%gcc@=12.2.0")
|
||||
|
||||
# Put spec built with installed compiler in the buildcache
|
||||
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")
|
||||
|
||||
# Now uninstall the spec
|
||||
uninstall_cmd("-y", "b%gcc@12.2.0")
|
||||
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- gcc@=12.2.0%gcc@10.2.1
|
||||
specs:
|
||||
- b%gcc@12.2.0
|
||||
mirrors:
|
||||
atestm: {0}
|
||||
ci:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
- arch=test-debian6-x86_64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- match:
|
||||
- arch=test-debian6-core2
|
||||
build-job:
|
||||
tags:
|
||||
- meh
|
||||
- match:
|
||||
- arch=test-debian6-aarch64
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
- match:
|
||||
- arch=test-debian6-m1
|
||||
build-job:
|
||||
tags:
|
||||
- meh
|
||||
""".format(
|
||||
mirror_url
|
||||
)
|
||||
)
|
||||
|
||||
# Without this monkeypatch, pipeline generation process would think that
|
||||
# nothing in the environment needs rebuilding. With the monkeypatch, the
|
||||
# process sees the compiler as needing a rebuild, which should then result
|
||||
# in the specs built with that compiler needing a rebuild too.
|
||||
def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
|
||||
if spec.name == "gcc":
|
||||
return []
|
||||
else:
|
||||
return [{"spec": spec, "mirror_url": mirror_url}]
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
|
||||
|
||||
with ev.read("test"):
|
||||
ci_cmd("generate", "--output-file", outputfile)
|
||||
|
||||
with open(outputfile) as of:
|
||||
yaml_contents = of.read()
|
||||
original_yaml_contents = syaml.load(yaml_contents)
|
||||
|
||||
# without the monkeypatch, everything appears up to date and no
|
||||
# rebuild jobs are generated.
|
||||
assert original_yaml_contents
|
||||
assert "no-specs-to-rebuild" in original_yaml_contents
|
||||
|
||||
monkeypatch.setattr(
|
||||
spack.binary_distribution, "get_mirrors_for_spec", fake_get_mirrors_for_spec
|
||||
)
|
||||
|
||||
ci_cmd("generate", "--output-file", outputfile)
|
||||
|
||||
with open(outputfile) as of:
|
||||
yaml_contents = of.read()
|
||||
new_yaml_contents = syaml.load(yaml_contents)
|
||||
|
||||
assert new_yaml_contents
|
||||
|
||||
# This 'needs' graph reflects that even though specs 'a' and 'b' do
|
||||
# not otherwise need to be rebuilt (thanks to DAG pruning), they
|
||||
# both end up in the generated pipeline because the compiler they
|
||||
# depend on is bootstrapped, and *does* need to be rebuilt.
|
||||
needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}
|
||||
|
||||
_validate_needs_graph(new_yaml_contents, needs_graph, False)
|
||||
|
||||
|
||||
def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
|
||||
"""Test that we can detect the change to .gitlab-ci.yml in a
|
||||
mock spack git repo."""
|
||||
@@ -1828,7 +1534,7 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
||||
generated_hashes = []
|
||||
|
||||
for ci_key in yaml_contents.keys():
|
||||
if ci_key.startswith("(specs)"):
|
||||
if "variables" in yaml_contents[ci_key]:
|
||||
generated_hashes.append(
|
||||
yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
|
||||
)
|
||||
@@ -2240,9 +1946,7 @@ def test_ci_reproduce(
|
||||
ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root)
|
||||
|
||||
target_name = spack.platforms.test.Test.default
|
||||
job_name = ci.get_job_name(
|
||||
"specs", False, job_spec, "test-debian6-%s" % target_name, None
|
||||
)
|
||||
job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None)
|
||||
|
||||
repro_file = os.path.join(working_dir.strpath, "repro.json")
|
||||
repro_details = {
|
||||
@@ -2309,8 +2013,6 @@ def test_cmd_first_line():
|
||||
legacy_spack_yaml_contents = """
|
||||
spack:
|
||||
definitions:
|
||||
- bootstrap:
|
||||
- cmake@3.4.3
|
||||
- old-gcc-pkgs:
|
||||
- archive-files
|
||||
- callpath
|
||||
@@ -2325,9 +2027,6 @@ def test_cmd_first_line():
|
||||
mirrors:
|
||||
test-mirror: file:///some/fake/mirror
|
||||
{0}:
|
||||
bootstrap:
|
||||
- name: bootstrap
|
||||
compiler-agnostic: true
|
||||
match_behavior: first
|
||||
mappings:
|
||||
- match:
|
||||
@@ -2379,16 +2078,10 @@ def test_gitlab_ci_deprecated(
|
||||
contents = f.read()
|
||||
yaml_contents = syaml.load(contents)
|
||||
|
||||
found_spec = False
|
||||
for ci_key in yaml_contents.keys():
|
||||
if "(bootstrap)" in ci_key:
|
||||
found_spec = True
|
||||
assert "cmake" in ci_key
|
||||
assert found_spec
|
||||
assert "stages" in yaml_contents
|
||||
assert len(yaml_contents["stages"]) == 6
|
||||
assert len(yaml_contents["stages"]) == 5
|
||||
assert yaml_contents["stages"][0] == "stage-0"
|
||||
assert yaml_contents["stages"][5] == "stage-rebuild-index"
|
||||
assert yaml_contents["stages"][4] == "stage-rebuild-index"
|
||||
|
||||
assert "rebuild-index" in yaml_contents
|
||||
rebuild_job = yaml_contents["rebuild-index"]
|
||||
|
||||
@@ -8,8 +8,6 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import llnl.util.filesystem
|
||||
|
||||
import spack.compilers
|
||||
import spack.main
|
||||
import spack.version
|
||||
@@ -18,124 +16,8 @@
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_compiler_version():
|
||||
return "4.5.3"
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_compiler_dir(tmpdir, mock_compiler_version):
|
||||
"""Return a directory containing a fake, but detectable compiler."""
|
||||
|
||||
tmpdir.ensure("bin", dir=True)
|
||||
bin_dir = tmpdir.join("bin")
|
||||
|
||||
gcc_path = bin_dir.join("gcc")
|
||||
gxx_path = bin_dir.join("g++")
|
||||
gfortran_path = bin_dir.join("gfortran")
|
||||
|
||||
gcc_path.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
|
||||
for arg in "$@"; do
|
||||
if [ "$arg" = -dumpversion ]; then
|
||||
echo '%s'
|
||||
fi
|
||||
done
|
||||
"""
|
||||
% mock_compiler_version
|
||||
)
|
||||
|
||||
# Create some mock compilers in the temporary directory
|
||||
llnl.util.filesystem.set_executable(str(gcc_path))
|
||||
gcc_path.copy(gxx_path, mode=True)
|
||||
gcc_path.copy(gfortran_path, mode=True)
|
||||
|
||||
return str(tmpdir)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="Cannot execute bash \
|
||||
script on Windows",
|
||||
)
|
||||
@pytest.mark.regression("11678,13138")
|
||||
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with open("gcc", "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
echo "0.0.0"
|
||||
"""
|
||||
)
|
||||
os.chmod("gcc", 0o700)
|
||||
|
||||
os.environ["PATH"] = str(tmpdir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" in output
|
||||
|
||||
|
||||
@pytest.mark.regression("17589")
|
||||
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
# make a script to emulate apple gcc's version args
|
||||
with open("gcc", "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
#!/bin/sh
|
||||
if [ "$1" = "-dumpversion" ]; then
|
||||
echo "4.2.1"
|
||||
elif [ "$1" = "--version" ]; then
|
||||
echo "Configured with: --prefix=/dummy"
|
||||
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
|
||||
echo "Target: x86_64-apple-darwin18.7.0"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /dummy"
|
||||
else
|
||||
echo "clang: error: no input files"
|
||||
fi
|
||||
"""
|
||||
)
|
||||
os.chmod("gcc", 0o700)
|
||||
|
||||
os.environ["PATH"] = str(tmpdir)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" not in output
|
||||
|
||||
|
||||
def test_compiler_remove(mutable_config, mock_packages):
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
|
||||
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
|
||||
spack.cmd.compiler.compiler_remove(args)
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
|
||||
|
||||
|
||||
@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
                  script on Windows",
)
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
    # Compilers available by default.
    old_compilers = set(spack.compilers.all_compiler_specs())

    args = spack.util.pattern.Bunch(
        all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
    )
    spack.cmd.compiler.compiler_find(args)

    # Ensure new compiler is in there
    new_compilers = set(spack.compilers.all_compiler_specs())
    new_compiler = new_compilers - old_compilers
    assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)


@pytest.fixture
def clangdir(tmpdir):
    """Create a directory with some dummy compiler scripts in it.
def compilers_dir(mock_executable):
    """Create a directory with some mock compiler scripts in it.

    Scripts are:
      - clang
@@ -145,11 +27,9 @@ def clangdir(tmpdir):
      - gfortran-8

    """
    with tmpdir.as_cwd():
        with open("clang", "w") as f:
            f.write(
                """\
#!/bin/sh
    clang_path = mock_executable(
        "clang",
        output="""
if [ "$1" = "--version" ]; then
    echo "clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
@@ -159,12 +39,11 @@ def clangdir(tmpdir):
    echo "clang: error: no input files"
    exit 1
fi
"""
            )
        shutil.copy("clang", "clang++")
""",
    )
    shutil.copy(clang_path, clang_path.parent / "clang++")

        gcc_script = """\
#!/bin/sh
    gcc_script = """
if [ "$1" = "-dumpversion" ]; then
    echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
@@ -178,120 +57,187 @@ def clangdir(tmpdir):
    exit 1
fi
"""
        with open("gcc-8", "w") as f:
            f.write(gcc_script.format("gcc", "gcc-8"))
        with open("g++-8", "w") as f:
            f.write(gcc_script.format("g++", "g++-8"))
        with open("gfortran-8", "w") as f:
            f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
        os.chmod("clang", 0o700)
        os.chmod("clang++", 0o700)
        os.chmod("gcc-8", 0o700)
        os.chmod("g++-8", 0o700)
        os.chmod("gfortran-8", 0o700)
    mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
    mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
    mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))

        yield tmpdir
    return clang_path.parent
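
The rewritten fixture above leans on a mock_executable factory instead of hand-writing scripts. That fixture is not part of this diff; the following is only an illustrative sketch, assuming it behaves like a pytest factory fixture backed by a temporary directory (the real implementation lives in Spack's conftest.py and may differ in names and details):

import stat

import pytest


@pytest.fixture
def mock_executable(tmp_path):
    """Factory fixture: create a fake executable that runs `output` as a shell script."""

    def _factory(name, output, subdir=("bin",)):
        bin_dir = tmp_path.joinpath(*subdir)
        bin_dir.mkdir(parents=True, exist_ok=True)
        script = bin_dir / name
        script.write_text("#!/bin/sh\n" + output + "\n")
        # Mark the script executable so detection code can actually run it
        script.chmod(script.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        return script

    return _factory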


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
                  script on Windows",
)
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
    """Ensure that we'll mix compilers with different suffixes when necessary."""
    os.environ["PATH"] = str(clangdir)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
    """Tests that 'spack compiler find' looks into PATH by default, if no specific path
    is given.
    """
    gcc_path = mock_executable("gcc", output='echo "0.0.0"')

    os.environ["PATH"] = str(gcc_path.parent)
    output = compiler("find", "--scope=site")

    assert "clang@=11.0.0" in output
    assert "gcc@=8.4.0" in output
    assert "gcc" in output


@pytest.mark.regression("17589")
|
||||
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
|
||||
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
|
||||
Clang with a few tweaks.
|
||||
"""
|
||||
gcc_path = mock_executable(
|
||||
"gcc",
|
||||
output="""
|
||||
if [ "$1" = "-dumpversion" ]; then
|
||||
echo "4.2.1"
|
||||
elif [ "$1" = "--version" ]; then
|
||||
echo "Configured with: --prefix=/dummy"
|
||||
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
|
||||
echo "Target: x86_64-apple-darwin18.7.0"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /dummy"
|
||||
else
|
||||
echo "clang: error: no input files"
|
||||
fi
|
||||
""",
|
||||
)
|
||||
|
||||
os.environ["PATH"] = str(gcc_path.parent)
|
||||
output = compiler("find", "--scope=site")
|
||||
|
||||
assert "gcc" not in output
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
|
||||
def test_compiler_remove(mutable_config, mock_packages):
|
||||
"""Tests that we can remove a compiler from configuration."""
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
|
||||
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
|
||||
spack.cmd.compiler.compiler_remove(args)
|
||||
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
    # Duplicate "site" scope into "user" scope
    site_config = spack.config.get("compilers", scope="site")
    spack.config.set("compilers", site_config, scope="user")

    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
    """Tests that we can add a compiler to configuration."""
    expected_version = "4.5.3"
    gcc_path = mock_executable(
        "gcc",
        output=f"""\
for arg in "$@"; do
    if [ "$arg" = -dumpversion ]; then
        echo '{expected_version}'
    fi
done
""",
    )
    bin_dir = gcc_path.parent
    root_dir = bin_dir.parent

    compilers_before_find = set(spack.compilers.all_compiler_specs())
    args = spack.util.pattern.Bunch(
        all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
    )
    spack.cmd.compiler.compiler_find(args)
    compilers_after_find = set(spack.compilers.all_compiler_specs())

    compilers_added_by_find = compilers_after_find - compilers_before_find
    assert len(compilers_added_by_find) == 1
    new_compiler = compilers_added_by_find.pop()
    assert new_compiler.version == spack.version.Version(expected_version)


@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we'll mix compilers with different suffixes when necessary."""
    os.environ["PATH"] = str(compilers_dir)
    output = compiler("find", "--scope=site")

    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

    gfortran_path = str(clangdir.join("gfortran-8"))
    gfortran_path = str(compilers_dir / "gfortran-8")

    assert clang["paths"] == {
        "cc": str(clangdir.join("clang")),
        "cxx": str(clangdir.join("clang++")),
        "cc": str(compilers_dir / "clang"),
        "cxx": str(compilers_dir / "clang++"),
        # we only auto-detect mixed clang on macos
        "f77": gfortran_path if sys.platform == "darwin" else None,
        "fc": gfortran_path if sys.platform == "darwin" else None,
    }

    assert gcc["paths"] == {
        "cc": str(clangdir.join("gcc-8")),
        "cxx": str(clangdir.join("g++-8")),
        "cc": str(compilers_dir / "gcc-8"),
        "cxx": str(compilers_dir / "g++-8"),
        "f77": gfortran_path,
        "fc": gfortran_path,
    }


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
                  script on Windows",
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
    with clangdir.as_cwd():
        shutil.copy("clang", "clang-gpu")
        shutil.copy("clang++", "clang++-gpu")
        os.chmod("clang-gpu", 0o700)
        os.chmod("clang++-gpu", 0o700)
    clang_path = compilers_dir / "clang"
    shutil.copy(clang_path, clang_path.parent / "clang-gpu")
    shutil.copy(clang_path, clang_path.parent / "clang++-gpu")

    os.environ["PATH"] = str(clangdir)
    os.environ["PATH"] = str(compilers_dir)
    output = compiler("find", "--scope=site")

    assert "clang@=11.0.0" in output
    assert "gcc@=8.4.0" in output
    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

    assert clang["paths"]["cc"] == str(clangdir.join("clang"))
    assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
    assert clang["paths"]["cc"] == str(compilers_dir / "clang")
    assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")


@pytest.mark.skipif(
    sys.platform == "win32",
    reason="Cannot execute bash \
                  script on Windows",
)
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
    """Ensure that we find compilers that come first in the PATH first"""

    with clangdir.as_cwd():
        os.mkdir("first_in_path")
        shutil.copy("gcc-8", "first_in_path/gcc-8")
        shutil.copy("g++-8", "first_in_path/g++-8")
        shutil.copy("gfortran-8", "first_in_path/gfortran-8")

    # the first_in_path folder should be searched first
    os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
    new_dir = compilers_dir / "first_in_path"
    new_dir.mkdir()
    for name in ("gcc-8", "g++-8", "gfortran-8"):
        shutil.copy(compilers_dir / name, new_dir / name)
    # Set PATH to have the new folder searched first
    os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))

    compiler("find", "--scope=site")

    config = spack.compilers.get_compiler_config("site", False)

    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

    assert gcc["paths"] == {
        "cc": str(clangdir.join("first_in_path", "gcc-8")),
        "cxx": str(clangdir.join("first_in_path", "g++-8")),
        "f77": str(clangdir.join("first_in_path", "gfortran-8")),
        "fc": str(clangdir.join("first_in_path", "gfortran-8")),
        "cc": str(new_dir / "gcc-8"),
        "cxx": str(new_dir / "g++-8"),
        "f77": str(new_dir / "gfortran-8"),
        "fc": str(new_dir / "gfortran-8"),
    }


def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
    # Spack should not automatically search for compilers when listing them and none
    # are available. And when stdout is not a tty like in tests, there should be no
    # output and no error exit code.
    os.environ["PATH"] = str(clangdir)
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
    """Spack should not automatically search for compilers when listing them and none are
    available. And when stdout is not a tty like in tests, there should be no output and
    no error exit code.
    """
    os.environ["PATH"] = str(compilers_dir)
    out = compiler("list")
    assert not out
    assert compiler.returncode == 0
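
Throughout these tests, the compiler callable is presumably a SpackCommand wrapper, as with the external, graph, and info commands further below in this diff; a minimal usage sketch (illustrative only, not taken verbatim from this changeset):

from spack.main import SpackCommand

# Wrap the `spack compiler` command so tests can invoke it in-process
compiler = SpackCommand("compiler")

output = compiler("find", "--scope=site")  # captured stdout is returned as a string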

@@ -32,7 +32,7 @@ def check_develop(self, env, spec, path=None):
        assert dev_specs_entry["spec"] == str(spec)

        # check yaml representation
        yaml = ev.config_dict(env.manifest)
        yaml = env.manifest[ev.TOP_LEVEL_KEY]
        assert spec.name in yaml["develop"]
        yaml_entry = yaml["develop"][spec.name]
        assert yaml_entry["spec"] == str(spec)

@@ -390,6 +390,19 @@ def test_remove_after_concretize():

    assert not any(s.name == "mpileaks" for s in env_specs)

def test_remove_before_concretize():
    e = ev.create("test")
    e.unify = True

    e.add("mpileaks")
    e.concretize()

    e.remove("mpileaks")
    e.concretize()

    assert not list(e.concretized_specs())


def test_remove_command():
    env("create", "test")
    assert "test" in env("list")
@@ -906,7 +919,7 @@ def test_env_config_precedence(environment_from_manifest):
    mpileaks:
      version: ["2.2"]
    libelf:
      version: ["0.8.11"]
      version: ["0.8.10"]
"""
    )

@@ -2675,7 +2688,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m

    spec = e.specs_by_hash[e.concretized_order[0]]
    view_prefix = e.default_view.get_projection_for_spec(spec)
    modules_glob = "%s/modules/**/*" % e.path
    modules_glob = "%s/modules/**/*/*" % e.path
    modules = glob.glob(modules_glob)
    assert len(modules) == 1
    module = modules[0]
@@ -2711,12 +2724,12 @@ def test_multiple_modules_post_env_hook(environment_from_manifest, install_mocke

    spec = e.specs_by_hash[e.concretized_order[0]]
    view_prefix = e.default_view.get_projection_for_spec(spec)
    modules_glob = "%s/modules/**/*" % e.path
    modules_glob = "%s/modules/**/*/*" % e.path
    modules = glob.glob(modules_glob)
    assert len(modules) == 1
    module = modules[0]

    full_modules_glob = "%s/full_modules/**/*" % e.path
    full_modules_glob = "%s/full_modules/**/*/*" % e.path
    full_modules = glob.glob(full_modules_glob)
    assert len(full_modules) == 1
    full_module = full_modules[0]
@@ -3299,3 +3312,22 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
    assert dir_name in out
    assert env_dir in ev.root(dir_name)
    assert os.path.isdir(os.path.join(env_dir, dir_name))


def test_environment_created_from_lockfile_has_view(mock_packages, tmpdir):
    """When an env is created from a lockfile, a view should be generated for it"""
    env_a = str(tmpdir.join("a"))
    env_b = str(tmpdir.join("b"))

    # Create an environment and install a package in it
    env("create", "-d", env_a)
    with ev.Environment(env_a):
        add("libelf")
        install("--fake")

    # Create another environment from the lockfile of the first environment
    env("create", "-d", env_b, os.path.join(env_a, "spack.lock"))

    # Make sure the view was created
    with ev.Environment(env_b) as e:
        assert os.path.isdir(e.view_path_default)

@@ -44,9 +44,8 @@ def define_plat_exe(exe):

def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
    pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
    executables_found(
        {mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
    )
    cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
    executables_found({str(cmake_path): define_plat_exe("cmake")})

    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)

@@ -71,7 +70,7 @@ def test_find_external_two_instances_same_package(
        "cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
    )
    cmake_exe = define_plat_exe("cmake")
    executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
    executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})

    pkg_to_entries = spack.detection.by_executable(pkgs_to_check)

@@ -107,7 +106,7 @@ def test_get_executables(working_env, mock_executable):
    cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
    path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
    cmake_exe = define_plat_exe("cmake")
    assert path_to_exe[cmake_path1] == cmake_exe
    assert path_to_exe[str(cmake_path1)] == cmake_exe


external = SpackCommand("external")
@@ -334,7 +333,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
    assert "extra_attributes" in external_gcc
    extra_attributes = external_gcc["extra_attributes"]
    assert "prefix" not in extra_attributes
    assert extra_attributes["compilers"]["c"] == gcc_exe
    assert extra_attributes["compilers"]["c"] == str(gcc_exe)


def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):

@@ -357,3 +357,18 @@ def test_find_loaded(database, working_env):
    output = find("--loaded")
    expected = find()
    assert output == expected


@pytest.mark.regression("37712")
def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
    """Tests that having an active environment with a root spec containing a compiler constrained
    by a version range (i.e. @X.Y rather than the single version @=X.Y) doesn't result in an error
    when invoking "spack find".
    """
    test_environment = ev.create_in_dir(tmp_path)
    test_environment.add("zlib %gcc@12.1.0")
    test_environment.write()

    with test_environment:
        output = find()
    assert "zlib%gcc@12.1.0" in output

@@ -3,16 +3,12 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

from spack.main import SpackCommand, SpackCommandError

graph = SpackCommand("graph")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.mark.db
@pytest.mark.usefixtures("mock_packages", "database")

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import sys

import pytest

@@ -13,8 +12,6 @@

info = SpackCommand("info")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")


@pytest.fixture(scope="module")
def parser():

@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
    install("gcc@=10.2.0")

    # Put installed compiler in the buildcache
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")

    # Now uninstall the compiler
    uninstall("-y", "gcc@10.2.0")
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):

    # Populate the buildcache
    install(package_name)
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)

    # Uninstall all of the packages for a clean slate
    uninstall("-y", "-a")

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

import spack.store
@@ -15,8 +13,6 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.mark.db
def test_mark_mode_required(mutable_database):

@@ -235,7 +235,7 @@ def test_mirror_destroy(

    # Put a binary package in a buildcache
    install("--no-cache", spec_name)
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents
@@ -245,7 +245,7 @@ def test_mirror_destroy(

    assert not os.path.exists(mirror_dir.strpath)

    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents

@@ -157,7 +157,7 @@ def _parse_types(string):


def test_spec_deptypes_nodes():
    output = spec("--types", "--cover", "nodes", "dt-diamond")
    output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == [" "]
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():


def test_spec_deptypes_edges():
    output = spec("--types", "--cover", "edges", "dt-diamond")
    output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == [" "]

@@ -319,3 +319,17 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
    spack.cmd.common.arguments.sanitize_reporter_options(args)
    filename = spack.cmd.test.report_filename(args, suite)
    assert filename != "https://blahblah/submit.php?project=debugging"


def test_test_output_multiple_specs(
    mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
):
    """Ensure proper reporting for suite with skipped, failing, and passed tests."""
    install("test-error", "simple-standalone-test@0.9", "simple-standalone-test@1.0")
    out = spack_test("run", "test-error", "simple-standalone-test", fail_on_error=False)

    # Note that a spec with passing *and* skipped tests is still considered
    # to have passed at this level. If you want to see the spec-specific
    # part result summaries, you'll have to look at the "test-out.txt" files
    # for each spec.
    assert "1 failed, 2 passed of 3 specs" in out

@@ -337,8 +337,6 @@ def test_compiler_flags_differ_identical_compilers(self):

        # Get the compiler that matches the spec (
        compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
        # Clear cache for compiler config since it has its own cache mechanism outside of config
        spack.compilers._cache_config_file = []

        # Configure spack to have two identical compilers with different flags
        default_dict = spack.compilers._to_dict(compiler)
@@ -2137,7 +2135,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):

            {
                "compiler": {
                    "spec": "gcc@foo",
                    "paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
                    "paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
                    "operating_system": "debian6",
                    "modules": [],
                }
@@ -2172,3 +2170,14 @@ def test_concretization_with_compilers_supporting_target_any(self):
        with spack.config.override("compilers", compiler_configuration):
            s = spack.spec.Spec("a").concretized()
        assert s.satisfies("%gcc@12.1.0")

    @pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
    def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
        """Tests that information on virtuals is annotated on DAG edges"""
        spec = default_mock_concretization(spec_str)
        mpi_provider = spec["mpi"].name

        edges = spec.edges_to_dependencies(name=mpi_provider)
        assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
        edges = spec.edges_to_dependencies(name="callpath")
        assert len(edges) == 1 and edges[0].virtuals == ()
Some files were not shown because too many files have changed in this diff.