Compare commits: `v0.20.2...develop-20`

888 commits
**.github/dependabot.yml** (vendored, 5 changes)

```diff
@@ -5,3 +5,8 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
+  # Requirements to build documentation
+  - package-ecosystem: "pip"
+    directory: "/lib/spack/docs"
+    schedule:
+      interval: "daily"
```
**.github/workflows/audit.yaml** (vendored, 6 changes)

```diff
@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
@@ -38,7 +38,7 @@ jobs:
       run: |
        . share/spack/setup-env.sh
        $(which spack) audit packages
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
+    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,linux,audits
```
**.github/workflows/bootstrap.yml** (vendored, 22 changes)

Eleven hunks (at old lines 24, 62, 99, 133, 158, 179, 204, 247, 283, 316, and 333) apply the same one-line pin update to every job's checkout step:

```diff
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
```
**.github/workflows/build-containers.yml** (vendored, 14 changes)

```diff
@@ -49,14 +49,14 @@ jobs:
         [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
         [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
         [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-        [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
+        [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
         [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
         [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -92,13 +92,13 @@ jobs:
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
+        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
+        uses: docker/setup-buildx-action@16c0bc4a6e6ada2cfd8afd41d22d95379cf7c32a # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
+        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
```
**.github/workflows/ci.yaml** (vendored, 2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (vendored, new file, 31 lines)

```yaml
name: Windows Paraview Nightly

on:
  schedule:
    - cron: '0 2 * * *'  # Run at 2 am

defaults:
  run:
    shell:
      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}

jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools coverage
      - name: Build Test
        run: |
          spack compiler find
          spack external find cmake ninja win-sdk win-wdk wgl msmpi
          spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
          exit 0
```
**.github/workflows/unit_tests.yaml** (vendored, 27 changes)

Every job in this file (the Linux unit tests, the `shell` integration tests, the dnf-based container job, `clingo-cffi`, and the macOS unit tests) gets the same pin updates:

```diff
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
```

In addition, the container job's setup step now marks the workspace as a safe git directory:

```diff
@@ -133,10 +133,11 @@ jobs:
       run: |
         git --version
+        git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
         . .github/workflows/setup_git.sh
         useradd spack-test
```
**.github/workflows/valid-style.yml** (vendored, 12 changes)

The `validate` and `style` jobs receive the same `actions/checkout` and `actions/setup-python` pin updates as above. The dnf-based container job also bumps `actions/checkout` and gains two new lines:

```diff
@@ -68,10 +68,11 @@ jobs:
       run: |
         git --version
+        git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
         . .github/workflows/setup_git.sh
         useradd spack-test
@@ -80,6 +81,7 @@ jobs:
       run: |
         source share/spack/setup-env.sh
+        spack debug report
         spack -d bootstrap now --dev
         spack style -t black
         spack unit-test -V
```
**.github/workflows/windows_python.yml** (vendored, 16 changes)

All three jobs (`unit-tests`, `unit-tests-cmd`, and `build-abseil`) receive the `actions/checkout` and `actions/setup-python` pin updates shown above; the two unit-test jobs also receive the `codecov/codecov-action` update. There are no other changes.
The next hunk's file header was lost in capture; the content matches a Read the Docs configuration (likely `.readthedocs.yml`):

```diff
@@ -1,10 +1,16 @@
 version: 2

+build:
+  os: "ubuntu-22.04"
+  apt_packages:
+  - graphviz
+  tools:
+    python: "3.11"
+
 sphinx:
   configuration: lib/spack/docs/conf.py
   fail_on_warning: true

 python:
-  version: 3.7
   install:
   - requirements: lib/spack/docs/requirements.txt
```
**CHANGELOG.md** (218 changes)

The entire `v0.20.0` release-notes section below is new (`@@ -1,3 +1,221 @@`), inserted above the existing `# v0.19.1 (2023-02-07)` entry:

# v0.20.0 (2023-05-21)

`v0.20.0` is a major feature release.

## Features in this release

1. **`requires()` directive and enhanced package requirements**

   We've added some more enhancements to requirements in Spack (#36286).

   There is a new `requires()` directive for packages. `requires()` is the opposite of
   `conflicts()`. You can use it to impose constraints on this package when certain
   conditions are met:

   ```python
   requires(
       "%apple-clang",
       when="platform=darwin",
       msg="This package builds only with clang on macOS",
   )
   ```

   More on this in [the docs](https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).

   You can also now add a `when:` clause to `requires:` in your `packages.yaml`
   configuration or in an environment:

   ```yaml
   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
   ```

   More details can be found [here](https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).

2. **Exact versions**

   Spack did not previously have a way to distinguish a version if it was a prefix of
   some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
   can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
   to patch *only* the `3.2` version of a package. The new syntax is described in
   [the docs](https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

   Generally, when writing packages, you should prefer ranges like `@3.2` over
   specific versions, as this allows the concretizer more leeway when selecting
   versions of dependencies. More details and recommendations are in the
   [packaging guide](https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).

   See #36273 for full details on the version refactor.
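   As a quick sketch (with `zlib` as a stand-in package), the difference shows up
   directly on the command line:

   ```sh
   # @1.2 is a range: it also matches 1.2.11, 1.2.13, ...
   spack spec zlib@1.2

   # @=1.2 matches the exact version 1.2 and nothing else
   spack spec zlib@=1.2
   ```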
3. **New testing interface**

   Writing package tests is now much simpler with a new
   [test interface](https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).

   Writing a test is now as easy as adding a method that starts with `test_`:

   ```python
   class MyPackage(Package):
       ...

       def test_always_fails(self):
           """use assert to always fail"""
           assert False

       def test_example(self):
           """run installed example"""
           example = which(self.prefix.bin.example)
           example()
   ```

   You can use Python's native `assert` statement to implement your checks -- no more
   need to fiddle with `run_test` or other test framework methods. Spack will
   introspect the class and run `test_*` methods when you run `spack test`.
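   As a hedged usage sketch (the package name is a placeholder), stand-alone tests
   are driven from the command line:

   ```sh
   # Run the test_* methods of an installed package
   spack test run mypackage

   # Review the results (and their logs) afterwards
   spack test results -l
   ```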
4. **More stable concretization**

   * Now, `spack concretize` will *only* concretize the new portions of the environment
     and will not change existing parts of an environment unless you specify `--force`.
     This has always been true for `unify:false`, but not for `unify:true` and
     `unify:when_possible` environments. Now it is true for all of them (#37438, #37681).

   * The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
     That is, it will always treat the *roots* of your environment as it would with
     `--fresh`. This allows you to upgrade just the roots of your environment while
     keeping everything else stable (#30990). See the sketch below.
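   A sketch of how these options combine in practice (assuming an active environment,
   and that `--reuse-deps` is accepted alongside `--force`):

   ```sh
   # Only new or changed specs are concretized now
   spack concretize

   # Old behavior: re-concretize the whole environment
   spack concretize --force

   # Upgrade roots (treated as fresh) while reusing installed dependencies
   spack concretize --force --reuse-deps
   ```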
5. **Weekly develop snapshot releases**

   Since last year, we have maintained a buildcache of `develop` at
   https://binaries.spack.io/develop, but the cache can grow to contain so many builds
   as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
   add a corresponding tag to the Spack repository, so you can use a stack from a
   specific day. There are now tags in the spack repository like:

   * `develop-2023-05-14`
   * `develop-2023-05-18`

   that correspond to build caches like:

   * https://binaries.spack.io/develop-2023-05-14/e4s
   * https://binaries.spack.io/develop-2023-05-18/e4s

   We plan to store these snapshot releases weekly.
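   For example (a sketch; the mirror name is arbitrary), a snapshot buildcache can be
   attached like any other mirror:

   ```sh
   spack mirror add develop-2023-05-18 https://binaries.spack.io/develop-2023-05-18/e4s
   spack buildcache keys --install --trust
   ```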
6. **Specs in buildcaches can be referenced by hash.**

   * Previously, you could run `spack buildcache list` and see the hashes in
     buildcaches, but referring to them by hash would fail.
   * You can now run commands like `spack spec` and `spack install` and refer to
     buildcache hashes directly, e.g. `spack install /abc123` (#35042). See the
     sketch below.
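   A minimal sketch (`/abc123` is the placeholder hash from the notes above):

   ```sh
   # List specs (and their hashes) available in configured build caches
   spack buildcache list

   # Refer to a cached spec directly by a hash prefix
   spack spec /abc123
   spack install /abc123
   ```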
7. **New package and buildcache index websites**

   Our public websites for searching packages have been completely revamped and updated.
   You can check them out here:

   * *Package Index*: https://packages.spack.io
   * *Buildcache Index*: https://cache.spack.io

   Both are searchable and more interactive than before. Currently major releases are
   shown; UI for browsing `develop` snapshots is coming soon.

8. **Default CMake and Meson build types are now Release**

   Spack has historically defaulted to building with optimization and debugging, but
   packages like `llvm` can be enormous with debug turned on. Our default build type for
   all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:

   * much smaller binaries;
   * higher default optimization level; and
   * defining `NDEBUG` disables assertions, which may lead to further speedups.

   You can still get the old behavior back through requirements and package preferences,
   as sketched below.
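   One hedged possibility (the preference syntax via `spack config add` is assumed,
   not taken from the notes above):

   ```sh
   # Request the old default build type for a single spec
   spack install llvm build_type=RelWithDebInfo

   # Or prefer it for all packages via a package preference (assumed config path)
   spack config add "packages:all:variants:build_type=RelWithDebInfo"
   ```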
## Other new commands and directives

* `spack checksum` can automatically add new versions to packages (#24532)
* new command: `spack pkg grep` to easily search package files (#34388); see the sketch below
* New `maintainers` directive (#35083)
* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
* Allow using `-j` to control the parallelism of concretization (#37608)
* Add `--exclude` option to `spack external find` (#35013)
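A quick sketch of two of these (the search pattern and mirror name are placeholders):

```sh
# Search every package recipe for a pattern
spack pkg grep "depends_on"

# Push an installed spec to a mirror's build cache
spack buildcache push my-mirror zlib
```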
## Other new features of note

* editing: add higher-precedence `SPACK_EDITOR` environment variable (example below)
* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
  supporting Python 3.6 (#31091, #24885, #37008)
* Requirements and preferences should not define (non-git) versions (#37687, #37747)
* Environments now store spack version/commit in `spack.lock` (#32801)
* User can specify the name of the `packages` subdirectory in repositories (#36643)
* Add container images supporting RHEL alternatives (#36713)
* make version(...) kwargs explicit (#36998)
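For instance (a sketch), the new variable takes precedence over the usual editor
settings when Spack opens a file:

```sh
SPACK_EDITOR=vim spack edit zlib
```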
|
|
||||||
|
## Notable refactors
|
||||||
|
|
||||||
|
* buildcache create: reproducible tarballs (#35623)
|
||||||
|
* Bootstrap most of Spack dependencies using environments (#34029)
|
||||||
|
* Split `satisfies(..., strict=True/False)` into two functions (#35681)
|
||||||
|
* spack install: simplify behavior when inside environments (#35206)
|
||||||
|
|
||||||
|
## Binary cache and stack updates
|
||||||
|
|
||||||
|
* Major simplification of CI boilerplate in stacks (#34272, #36045)
|
||||||
|
* Many improvements to our CI pipeline's reliability
|
||||||
|
|
||||||
|
## Removals, Deprecations, and disablements
|
||||||
|
* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
|
||||||
|
* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
|
||||||
|
supports Python 3.6 and higher.
|
||||||
|
* Deprecated target names are no longer recognized by Spack. Use generic names instead:
|
||||||
|
* `graviton` is now `cortex_a72`
|
||||||
|
* `graviton2` is now `neoverse_n1`
|
||||||
|
* `graviton3` is now `neoverse_v1`
|
||||||
|
* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
|
||||||
|
removed in this release. Use `exclude` and `include` instead.
|
||||||
|
* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
|
||||||
|
any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
|
||||||
|
* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
|
||||||
|
* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use `spack
|
||||||
|
--bootstrap find` instead (#33964).
|
||||||
|
* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
|
||||||
|
deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
|
||||||
|
* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
|
||||||
|
mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
|
||||||
|
replaced by positional arguments (#37457).
|
||||||
|
* Deprecate `env:` as top level environment key (#37424)
|
||||||
|
* deprecate buildcache create --rel, buildcache install --allow-root (#37285)
|
||||||
|
* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been removed (#37425). This was deprecated in `v0.15` (#10556).
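
  The replacement is the named-field format syntax, e.g.:

  ```console
  $ spack find --format "{name}-{version}-{hash:7}"
  ```
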
## Notable Bugfixes

* Don't fetch package metadata for unknown concrete specs (#36990)
* Improve package source code context display on error (#37655)
* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
* `installer.py`: drop build edges of installed packages by default (#36707)
* Fix package requirements with git commits (#35057, #36347)
* Package requirements: allow single specs in requirement lists (#36258)
* Conditional variant values: allow booleans (#33939)
* `spack uninstall`: follow run/link edges on `--dependents` (#34058)

## Spack community stats

* 7,179 total packages, 499 new since `v0.19.0`
  * 329 new Python packages
  * 31 new R packages
* 336 people contributed to this release
  * 317 committers to packages
  * 62 committers to core

# v0.19.1 (2023-02-07)

### Spack Bugfixes

@@ -25,8 +25,6 @@ exit 1
 # Line above is a shell no-op, and ends a python multi-line comment.
 # The code above runs this file with our preferred python interpreter.

-from __future__ import print_function
-
 import os
 import os.path
 import sys

@@ -214,7 +214,7 @@ goto :end_switch
 if defined _sp_args (
   if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
     goto :default_case
   ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
     goto :default_case

132 lines — bin/spack.ps1 (new file)
@@ -0,0 +1,132 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.

# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# #######################################################################

function Compare-CommonArgs {
    $CMDArgs = $args[0]
    # These arguments take precedence and call for no further parsing of arguments;
    # invoke actual Spack entrypoint with that context and exit after
    "--help", "-h", "--version", "-V" | ForEach-Object {
        $arg_opt = $_
        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
            return $true
        }
    }
    return $false
}

function Read-SpackArgs {
    $SpackCMD_params = @()
    $SpackSubCommand = $NULL
    $SpackSubCommandArgs = @()
    $args_ = $args[0]
    $args_ | ForEach-Object {
        if (!$SpackSubCommand) {
            if($_.SubString(0,1) -eq "-")
            {
                $SpackCMD_params += $_
            }
            else{
                $SpackSubCommand = $_
            }
        }
        else{
            $SpackSubCommandArgs += $_
        }
    }
    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}

function Invoke-SpackCD {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack cd -h
    }
    else {
        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
        if (($NULL -ne $LOC)){
            if ( Test-Path -Path $LOC){
                Set-Location $LOC
            }
            else{
                exit 1
            }
        }
        else {
            exit 1
        }
    }
}

function Invoke-SpackEnv {
    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
        python $Env:SPACK_ROOT/bin/spack env -h
    }
    else {
        $SubCommandSubCommand = $SpackSubCommandArgs[0]
        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
        switch ($SubCommandSubCommand) {
            "activate" {
                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                elseif (!$SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            "deactivate" {
                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
                }
                elseif($SubCommandSubCommandArgs) {
                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
                }
                else {
                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
                    $ExecutionContext.InvokeCommand($SpackEnv)
                }
            }
            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
        }
    }
}

function Invoke-SpackLoad {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    else {
        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
        $ExecutionContext.InvokeCommand($SpackEnv)
    }
}

$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

if (Compare-CommonArgs $SpackCMD_params) {
    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    exit $LASTEXITCODE
}

# Process Spack commands with special conditions;
# all other commands are piped directly to Spack
switch($SpackSubCommand)
{
    "cd"     {Invoke-SpackCD}
    "env"    {Invoke-SpackEnv}
    "load"   {Invoke-SpackLoad}
    "unload" {Invoke-SpackLoad}
    default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}

16 lines — lib/spack/docs/_pygments/style.py (new file)
@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"

@@ -76,6 +76,53 @@ To build with ``icx``, do ::

   spack install patchelf%oneapi


Using oneAPI Spack environment
------------------------------

In this example, we build LAMMPS with ``icx`` using the Spack environment for oneAPI packages created by Intel. The
compilers are installed with Spack as in the example above.

Install the oneAPI compilers::

   spack install intel-oneapi-compilers

Add the compilers to your ``compilers.yaml`` so Spack can use them::

   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin

Verify that the compilers are available::

   spack compiler list

Clone the `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate the Intel oneAPI CPU environment::

   git clone https://github.com/spack/spack-configs
   spack env activate spack-configs/INTEL/CPU
   spack concretize -f

The `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; this list is constantly extended. It currently supports:

- `GROMACS <https://www.gromacs.org/>`_
- `HPCG <https://www.hpcg-benchmark.org/>`_
- `HPL <https://netlib.org/benchmark/hpl/>`_
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
- `OpenFOAM <https://www.openfoam.com/>`_
- `STREAM <https://www.cs.virginia.edu/stream/>`_
- `WRF <https://github.com/wrf-model/WRF>`_

To build LAMMPS with the oneAPI compiler from this environment, just run::

   spack install lammps

Compiled binaries can be found using::

   spack cd -i lammps

You can do the same for all other applications from this environment.

Using oneAPI MPI to Satisfy a Virtual Dependence
------------------------------------------------

@@ -72,7 +72,7 @@ arguments to the configure phase, you can use:

 .. code-block:: python

-   def configure_args(self, spec, prefix):
+   def configure_args(self):
        return ['--no-python-dbus']

@@ -97,9 +97,7 @@ class PatchedPythonDomain(PythonDomain):
     def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         if "refspecific" in node:
             del node["refspecific"]
-        return super(PatchedPythonDomain, self).resolve_xref(
-            env, fromdocname, builder, typ, target, node, contnode
-        )
+        return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)


 #
@@ -149,7 +147,6 @@ def setup(sphinx):
 # Get nice vector graphics
 graphviz_output_format = "svg"


 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

@@ -233,30 +230,8 @@ def setup(sphinx):
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
 # show_authors = False

-# The name of the Pygments (syntax highlighting) style to use.
-# We use our own extension of the default style with a few modifications
-from pygments.style import Style
-from pygments.styles.default import DefaultStyle
-from pygments.token import Comment, Generic, Text
-
-
-class SpackStyle(DefaultStyle):
-    styles = DefaultStyle.styles.copy()
-    background_color = "#f4f4f8"
-    styles[Generic.Output] = "#355"
-    styles[Generic.Prompt] = "bold #346ec9"
-
-
-import pkg_resources
-
-dist = pkg_resources.Distribution(__file__)
-sys.path.append(".")  # make 'conf' module findable
-ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
-dist._ep_map = {"pygments.styles": {"plugin1": ep}}
-pkg_resources.working_set.add(dist)
-
-pygments_style = "spack"
+sys.path.append("./_pygments")
+pygments_style = "style.SpackStyle"

 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []
@@ -341,16 +316,15 @@ class SpackStyle(DefaultStyle):
 # Output file base name for HTML help builder.
 htmlhelp_basename = "Spackdoc"


 # -- Options for LaTeX output --------------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
+    # 'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # 'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
@@ -143,6 +143,26 @@ The OS that are currently supported are summarized in the table below:
    * - Amazon Linux 2
      - ``amazonlinux:2``
      - ``spack/amazon-linux``
+   * - AlmaLinux 8
+     - ``almalinux:8``
+     - ``spack/almalinux8``
+   * - AlmaLinux 9
+     - ``almalinux:9``
+     - ``spack/almalinux9``
+   * - Rocky Linux 8
+     - ``rockylinux:8``
+     - ``spack/rockylinux8``
+   * - Rocky Linux 9
+     - ``rockylinux:9``
+     - ``spack/rockylinux9``
+   * - Fedora Linux 37
+     - ``fedora:37``
+     - ``spack/fedora37``
+   * - Fedora Linux 38
+     - ``fedora:38``
+     - ``spack/fedora38``

 All the images are tagged with the corresponding release of Spack:
|
|||||||
- No
|
- No
|
||||||
* - ``os_packages:command``
|
* - ``os_packages:command``
|
||||||
- Tool used to manage system packages
|
- Tool used to manage system packages
|
||||||
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
|
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
|
||||||
- Only with custom base images
|
- Only with custom base images
|
||||||
* - ``os_packages:update``
|
* - ``os_packages:update``
|
||||||
- Whether or not to update the list of available packages
|
- Whether or not to update the list of available packages
|
||||||
|
@@ -916,9 +916,9 @@ function, as shown in the example below:

 .. code-block:: yaml

    projections:
-     zlib: {name}-{version}
-     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
-     all: {name}-{version}/{compiler.name}-{compiler.version}
+     zlib: "{name}-{version}"
+     ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
+     all: "{name}-{version}/{compiler.name}-{compiler.version}"

 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
-	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
+	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
 	$(info Done!)
 	@touch $@
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your

 .. code-block:: console

-   $ module load gcc-4.9.0
+   $ module load gcc/4.9.0
    $ spack compiler find
    ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
        gcc@4.9.0
@@ -76,6 +76,7 @@ or refer to the full manual below.
    chain
    extensions
    pipelines
+   signing

 .. toctree::
    :maxdepth: 2
@@ -35,27 +35,27 @@ showing lots of installed packages:
    $ module avail

    --------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
-   autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
+   autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
-   automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
+   automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
-   binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
+   binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
-   bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
+   bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
-   bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
+   bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
-   bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
+   bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
-   cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
+   cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
-   curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
+   curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
-   expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
+   expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
-   flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
+   flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
-   gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
+   gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
-   gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
+   gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
-   gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
+   gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
-   help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
+   help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj

 The names should look familiar, as they resemble the output from ``spack find``.
 For example, you could type the following command to load the ``cmake`` module:

 .. code-block:: console

-   $ module load cmake-3.7.2-gcc-6.3.0-fowuuby
+   $ module load cmake/3.7.2-gcc-6.3.0-fowuuby

 Neither of these is particularly pretty, easy to remember, or easy to
 type. Luckily, Spack offers many facilities for customizing the module
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:

    $ spack module tcl loads --dependencies py-numpy git
    # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
-   module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
+   module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
    # ncurses@6.0%gcc@4.9.3=linux-x86_64
-   module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
+   module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
    # zlib@1.2.8%gcc@4.9.3=linux-x86_64
-   module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
+   module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
    # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
-   module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
+   module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
    # readline@6.3%gcc@4.9.3=linux-x86_64
-   module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
+   module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
    # python@3.5.1%gcc@4.9.3=linux-x86_64
-   module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
+   module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
    # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
-   module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
+   module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
    # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
-   module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
+   module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
    # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
-   module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
+   module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
    # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
-   module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
+   module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
    # curl@7.47.1%gcc@4.9.3=linux-x86_64
-   module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
+   module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
    # autoconf@2.69%gcc@4.9.3=linux-x86_64
-   module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
+   module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
    # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
-   module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
+   module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
    # expat@2.1.0%gcc@4.9.3=linux-x86_64
-   module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
+   module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
    # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
-   module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
+   module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd

 The script may be further edited by removing unnecessary modules.
@@ -826,12 +826,12 @@ For example, consider the following on one system:
 .. code-block:: console

    $ module avail
-   linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+   linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y

    $ spack module tcl loads antlr    # WRONG!
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load antlr-2.7.7-gcc-5.3.0-bdpl46y
+   module load antlr/2.7.7-gcc-5.3.0-bdpl46y

    $ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
    # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
-   module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+   module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
@@ -121,7 +121,7 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most comm

     def url_for_version(self, version):
         if version >= Version("2.1.1"):
-            return super(Openjpeg, self).url_for_version(version)
+            return super().url_for_version(version)
         url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
         return url_fmt.format(version)

@@ -155,7 +155,7 @@ builder class explicitly. Using the same example as above, this reads:

     def url_for_version(self, version):
         if version >= Version("2.1.1"):
-            return super(Openjpeg, self).url_for_version(version)
+            return super().url_for_version(version)
         url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
         return url_fmt.format(version)

@@ -3071,7 +3071,7 @@ follows:
     # The library provided by the bar virtual package
     @property
     def bar_libs(self):
-        return find_libraries("libFooBar", root=sef.home, recursive=True)
+        return find_libraries("libFooBar", root=self.home, recursive=True)

     # The baz virtual package home
     @property

@@ -1,13 +1,8 @@
-# These dependencies should be installed using pip in order
-# to build the documentation.
-
-sphinx>=3.4,!=4.1.2,!=5.1.0
-sphinxcontrib-programoutput
-sphinx-design
-sphinx-rtd-theme
-python-levenshtein
-# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
-# https://stackoverflow.com/questions/67542699
-docutils <0.17
-pygments <2.13
-urllib3 <2
+sphinx==6.2.1
+sphinxcontrib-programoutput==0.17
+sphinx_design==0.4.1
+sphinx-rtd-theme==1.2.2
+python-levenshtein==0.21.1
+docutils==0.18.1
+pygments==2.15.1
+urllib3==2.0.3

484 lines — lib/spack/docs/signing.rst (new file)
@@ -0,0 +1,484 @@

.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _signing:

=====================
Spack Package Signing
=====================

The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack's
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted, auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.

.. _risks:

------------------------------
Risks, Impact and Threat Model
------------------------------

This document addresses the approach taken to safeguard Spack's
reputation with regard to the integrity of the package data produced by
Spack's CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). That said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/re-signing of all packages since the key was breached,
and finally direct intervention by every Spack user to update their copy
of Spack's public keys used for local verification.

The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.

.. _overview:

-----------------
Pipeline Overview
-----------------

Spack pipelines build software through progressive stages where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons these dependencies are not
implemented through the default GitLab artifacts mechanism; instead
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.

- PR pipelines are launched in response to pull requests made by
  trusted and untrusted users. Packages built on these pipelines upload
  code to quarantined AWS S3 locations which cache the built packages
  for the purposes of review and iteration on the changes proposed in
  the pull request. Packages built on PR pipelines can come from
  untrusted users, so signing of these pipelines is not implemented.
  Jobs in these pipelines are executed via normal GitLab runners both
  within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
  strong integrity assurances that trace back to auditable change management
  decisions. These pipelines only run after members from a trusted group of
  reviewers verify that the proposed changes in a pull request are appropriate.
  Once the PR is merged, or a release is cut, a pipeline is run on protected
  GitLab runners which provide access to the required signing keys within the
  job. Intermediate keys are used to sign packages in each stage of the
  pipeline as they are built, and a final job officially signs each package
  external to any specific package's build environment. An intermediate key
  exists in the AWS infrastructure and for each affiliated institution that
  maintains protected runners. The runners that execute these pipelines
  exclusively accept jobs from protected branches, meaning the intermediate keys
  are never exposed to unreviewed code and the official keys are never exposed
  to any specific build environment.

.. _key_architecture:

----------------
Key Architecture
----------------

Spack's CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of Spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we'll refer to Root and Signing keys.
Each key has a private and a public component, as well as one or more identities
and zero or more signatures.

-------------------
Intermediate CI Key
-------------------

The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as each affiliated institution that maintains protected
runners. These intermediate keys are made available to the GitLab execution
environment building the package so that the package's dependencies may be
verified by the Signing Intermediate CI Public Key and the final package may be
signed by the Signing Intermediate CI Private Key.

+--------------------------------------------------+------------------------------------------------------+
| **Intermediate CI Key (GPG)**                                                                            |
+==================================================+======================================================+
| Root Intermediate CI Private Key (RSA 4096)#     | Root Intermediate CI Public Key (RSA 4096)           |
+--------------------------------------------------+------------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096)   | Signing Intermediate CI Public Key (RSA 4096)        |
+--------------------------------------------------+------------------------------------------------------+
| Identity: "Intermediate CI Key <maintainers@spack.io>"                                                   |
+--------------------------------------------------+------------------------------------------------------+
| Signatures: None                                                                                         |
+--------------------------------------------------+------------------------------------------------------+

The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline, completely separate from Spack's infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*

----------------
Reputational Key
----------------

The Reputational Key is the public-facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key, the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies they were signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from the packages and re-signs them with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
Reputational Key into this final job is done to prevent leakage of the key in a
Spack package. Because the Signing Reputational Private Key is never exposed to
a build job, it cannot accidentally end up in any built package.

+--------------------------------------------------+------------------------------------------------------+
| **Reputational Key (GPG)**                                                                               |
+==================================================+======================================================+
| Root Reputational Private Key (RSA 4096)#        | Root Reputational Public Key (RSA 4096)              |
+--------------------------------------------------+------------------------------------------------------+
| Signing Reputational Private Key (RSA 4096)      | Signing Reputational Public Key (RSA 4096)           |
+--------------------------------------------------+------------------------------------------------------+
| Identity: "Spack Project <maintainers@spack.io>"                                                         |
+--------------------------------------------------+------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_                                                       |
+--------------------------------------------------+------------------------------------------------------+

The Root Reputational Private Key is stripped out of the GPG key and stored
offline, completely separate from Spack's infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general it is the expectation that rotating this key will happen infrequently, if
at all. This should allow relatively transparent verification for the end-user
community without needing deep familiarity with GnuPG or Public Key
Infrastructure.

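As a practical aside for end users, a minimal sketch of fetching and trusting
the public keys published on Spack's binary mirrors before installing signed
binaries (this assumes a mirror is already configured)::

  $ spack buildcache keys --install --trust
  $ spack install zlib   # signature verification uses the trusted keyring
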
.. _build_cache_format:

------------------
Build Cache Format
------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built, as well as the compiler
used to build it and the package's name and version. For example::

  linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::

  -----BEGIN PGP SIGNED MESSAGE-----
  Hash: SHA512

  {
  "spec": {
      <concrete-spec-contents-omitted>
  },

  "buildcache_layout_version": 1,
  "binary_cache_checksum": {
      "hash_algorithm": "sha256",
      "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
  },

  "buildinfo": {
      "relative_prefix":
      "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
      "relative_rpaths": false
  }
  }

  -----BEGIN PGP SIGNATURE-----

  iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
  ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
  4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
  QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
  887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
  4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
  qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
  QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
  Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
  j3DXty57
  =3gvm
  -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::

  $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

The metadata (regardless of whether it is signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

  build_cache/
    # unsigned metadata (for indexing, contains sha256 of .spack file)
    <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
    # clearsigned metadata (same as above, but signed)
    <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
    <arch>/
      <compiler>/
        <name>-<ver>/
          # tar.gz-compressed prefix (may support more compression formats later)
          <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of Spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.

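As a sketch, the checksum comparison described above can be done by hand; the
path below reuses the example package and the layout just shown::

  $ sha256sum build_cache/linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12/linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack
  # compare the printed digest to "binary_cache_checksum:hash" in the metadata file
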
.. _internal_implementation:
|
||||||
|
|
||||||
|
-----------------------
|
||||||
|
Internal Implementation
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
The technical implementation of the pipeline signing process includes components
|
||||||
|
defined in Amazon Web Services, the Kubernetes cluster, at affilicated
|
||||||
|
institutions, and the GitLab/GitLab Runner deployment. We present the techincal
|
||||||
|
implementation in two interdependent sections. The first addresses how secrets
|
||||||
|
are managed through the lifecycle of a develop or release pipeline. The second
|
||||||
|
section describes how Gitlab Runner and pipelines are configured and managed to
|
||||||
|
support secure automated signing.
|
||||||
|
|
||||||
|
Secrets Management
|
||||||
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
As stated above the Root Private Keys (intermediate and reputational)
|
||||||
|
are stripped from the GPG keys and stored outside Spack’s
|
||||||
|
infrastructure.
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
**TODO**
|
||||||
|
- Explanation here about where and how access is handled for these keys.
|
||||||
|
- Both Root private keys are protected with strong passwords
|
||||||
|
- Who has access to these and how?
|
||||||
|
|
||||||
|
**Intermediate CI Key**
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
|
||||||
|
run in AWS, and one key for each affiliated institution (e.g. Univerity of
|
||||||
|
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
|
||||||
|
|
||||||
|
The Intermediate CI Key (including the Signing Intermediate CI Private Key is
|
||||||
|
exported as an ASCII armored file and stored in a Kubernetes secret called
|
||||||
|
``spack-intermediate-ci-signing-key``. For convenience sake, this same secret
|
||||||
|
contains an ASCII-armored export of just the *public* components of the
|
||||||
|
Reputational Key. This secret also contains the *public* components of each of
|
||||||
|
the affiliated institutions' Intermediate CI Key. These are potentially needed
|
||||||
|
to verify dependent packages which may have been found in the public mirror or
|
||||||
|
built by a protected job running on an affiliated institution's infrastrcuture
|
||||||
|
in an earlier stage of the pipeline.
|
||||||
|
|
||||||
|
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
|
||||||
|
the following way:
|
||||||
|
|
||||||
|
1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
|
||||||
|
a job tagged ``protected`` from a protected GitLab branch. (See
|
||||||
|
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||||
|
2. Based on its configuration, the runner creates a job Pod in the
|
||||||
|
pipeline namespace and mounts the spack-intermediate-ci-signing-key
|
||||||
|
Kubernetes secret into the build container
|
||||||
|
3. The Intermediate CI Key, affiliated institutions' public key and the
|
||||||
|
Reputational Public Key are imported into a keyring by the ``spack gpg …``
|
||||||
|
sub-command. This is initiated by the job’s build script which is created by
|
||||||
|
the generate job at the beginning of the pipeline.
|
||||||
|
4. Assuming the package has dependencies those specs are verified using
|
||||||
|
the keyring.
|
||||||
|
5. The package is built and the spec.json is generated
|
||||||
|
6. The spec.json is signed by the keyring and uploaded to the mirror’s
|
||||||
|
build cache.
|
||||||
|
|
||||||
|
**Reputational Key**
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
Because of the increased impact to end users in the case of a private
|
||||||
|
key breach, the Reputational Key is managed separately from the
|
||||||
|
Intermediate CI Keys and has additional controls. First, the Reputational
|
||||||
|
Key was generated outside of Spack’s infrastructure and has been signed
|
||||||
|
by the core development team. The Reputational Key (along with the
|
||||||
|
Signing Reputational Private Key) was then ASCII armor exported to a
|
||||||
|
file. Unlike the Intermediate CI Key this exported file is not stored as
|
||||||
|
a base64 encoded secret in Kubernetes. Instead\ *the key file
|
||||||
|
itself*\ is encrypted and stored in Kubernetes as the
|
||||||
|
``spack-signing-key-encrypted`` secret in the pipeline namespace.
|
||||||
|
|
||||||
|
The encryption of the exported Reputational Key (including the Signing
|
||||||
|
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
|
||||||
|
keys
|
||||||
|
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
|
||||||
|
The private key material is decrypted and imported at the time of signing into a
|
||||||
|
memory mounted temporary directory holding the keychain. The signing job uses
|
||||||
|
the `AWS Encryption SDK
|
||||||
|
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
|
||||||
|
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
|
||||||
|
decrypt the key is granted to the job Pod through a Kubernetes service account
|
||||||
|
specifically used for this, and only this, function. Finally, for convenience
|
||||||
|
sake, this same secret contains an ASCII-armored export of the *public*
|
||||||
|
components of the Intermediate CI Keys and the Reputational Key. This allows the
|
||||||
|
signing script to verify that packages were built by the pipeline (both on AWS
|
||||||
|
or at affiliated institutions), or signed previously as a part of a different
|
||||||
|
pipeline. This is is done *before* importing decrypting and importing the
|
||||||
|
Signing Reputational Private Key material and officially signing the packages.
|
||||||
|
|
||||||
|
Procedurally the ``spack-singing-key-encrypted`` secret is used in the
|
||||||
|
following way:
|
||||||
|
|
||||||
|
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
   up a job tagged ``notary`` from a protected GitLab branch (see
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job pod in the
   pipeline namespace. The job is run in a stripped-down, purpose-built
   Docker image, ``ghcr.io/spack/notary:latest``. The runner is
   configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
   a path on disk. Note that this becomes several files on disk: the
   public components of the Intermediate CI Keys, the public components
   of the Reputational Key, and an AWS KMS encrypted file containing the
   Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory-mounted
   directory where the GnuPG keyring will be created to verify, and
   then re-sign, the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
   a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec JSON data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` and piped directly into a
   ``gpg --import`` statement, which imports the key into the keyring
   mounted in memory (the pipe sketched above).
10. The private key is then used to sign each of the JSON specs, and
    the keyring is removed from disk.
11. The re-signed JSON specs are re-synced to the AWS S3 mirror, and
    the public signing of the packages for the develop or release
    pipeline that created them is complete. A sketch of steps 7
    through 10 follows this list.
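A hedged sketch of steps 7 through 10, with hypothetical paths; the actual
``sign.sh`` baked into the notary image may differ in detail:

.. code-block:: sh

   # Steps 7-8: verify each synced spec against the imported *public*
   # keys, then unpack the signed JSON payload for re-signing. Any bad
   # signature aborts the job immediately.
   for sig in /tmp/specs/*.spec.json.sig; do
       gpg --batch --verify "$sig" || exit 1
       gpg --batch --yes --output "${sig%.sig}" --decrypt "$sig"
   done

   # Step 9 is the decrypt-and-import pipe sketched earlier. Step 10
   # then re-signs each spec with the imported Reputational Key before
   # the tmpfs keyring is destroyed.
   for spec in /tmp/specs/*.spec.json; do
       gpg --batch --yes --clearsign "$spec"
   done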
Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric secret in KMS used
to encrypt the data key (which in turn is used to encrypt the GnuPG key;
see the `Encryption SDK
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small, trusted subset of the core development team are the only
individuals with access to this symmetric key.
.. _protected_runners:

Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack has a large number of GitLab runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.
A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners, there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard), and any tag branch (managed with the ``v*``
wildcard). Finally, Spack's pipeline generation code reserves certain tags to
make sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let's break these down:
- **Protected Branches** - Protected branches in Spack prevent anyone
  other than Maintainers in GitLab from pushing code. In the case of
  Spack, the only Maintainer-level entity pushing code to protected
  branches is Spack bot. Protecting branches also marks them in such a
  way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
  branches. Because protected runners have access to secrets, it's
  critical that they not run jobs from untrusted code (i.e. PR
  branches). If they did, it would be possible for a PR branch to tag a
  job in such a way that a protected runner executed that job and
  mounted secrets into a code execution environment that had not been
  reviewed by Spack maintainers. Note, however, that in the absence of
  the tagging used to route jobs, public runners *could* run jobs from
  protected branches. No secrets would be at risk of being breached,
  because non-protected runners do not have access to those secrets;
  lack of secrets would, however, cause the jobs to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
  protected jobs, Spack uses a small set of "reserved" job tags (note
  that these are *job* tags, not git tags). These tags are ``public``,
  ``protected``, and ``notary``. The majority of jobs executed in
  Spack's GitLab instance are executed via a ``generate`` job. The
  generate job code systematically ensures that no user-defined
  configuration sets these tags. Instead, the ``generate`` job sets
  these tags based on rules related to the branch where the pipeline
  originated. If the job is part of a pipeline on a PR branch, it sets
  the ``public`` tag. If the job is part of a pipeline on a protected
  branch, it sets the ``protected`` tag. Finally, if the job is the
  package signing job and it is running in a pipeline on a protected
  branch, it sets the ``notary`` tag. An illustrative sketch of this
  routing follows the list.
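Purely as an illustration of these routing rules (the actual implementation
is Python in Spack's pipeline generation code, and ``JOB_KIND`` is a
hypothetical stand-in for "is this the package signing job?"), the tag
selection amounts to:

.. code-block:: sh

   # CI_COMMIT_REF_NAME is GitLab CI's branch/tag name variable.
   case "$CI_COMMIT_REF_NAME" in
       develop|releases/v*|v*)
           # Pipelines on protected branches route to protected runners;
           # only the package signing job gets the notary tag.
           if [ "$JOB_KIND" = "package-signing" ]; then
               tags="notary"
           else
               tags="protected"
           fi
           ;;
       *)
           tags="public"   # PR pipelines stay on shared runners
           ;;
   esac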
Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with the ``protected``
or ``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed via:
1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated
   institutions.
3. Code running in the automated pipeline that has been reviewed by the
   Spack maintainers and judged to be appropriate.
Other attempts (whether through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.
.. [#f1]

   The Reputational Key has also cross signed core development team
   keys.
lib/spack/env/cc (vendored, 428 changes)

@@ -416,30 +416,14 @@ input_command="$*"
 # The lists are all bell-separated to be as flexible as possible, as their
 # contents may come from the command line, from ' '-separated lists,
 # ':'-separated lists, etc.
-include_dirs_list=""
-lib_dirs_list=""
-rpath_dirs_list=""
-system_include_dirs_list=""
-system_lib_dirs_list=""
-system_rpath_dirs_list=""
-isystem_system_include_dirs_list=""
-isystem_include_dirs_list=""
-libs_list=""
-other_args_list=""
-
-# Global state for keeping track of -Wl,-rpath -Wl,/path
-wl_expect_rpath=no
-
-# Same, but for -Xlinker -rpath -Xlinker /path
-xlinker_expect_rpath=no
-
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
             if system_dir "$1"; then
-                append system_rpath_dirs_list "$1"
+                append return_system_rpath_dirs_list "$1"
             else
-                append rpath_dirs_list "$1"
+                append return_rpath_dirs_list "$1"
             fi
             wl_expect_rpath=no
         else

@@ -449,9 +433,9 @@ parse_Wl() {
                 if [ -z "$arg" ]; then
                     shift; continue
                 elif system_dir "$arg"; then
-                    append system_rpath_dirs_list "$arg"
+                    append return_system_rpath_dirs_list "$arg"
                 else
-                    append rpath_dirs_list "$arg"
+                    append return_rpath_dirs_list "$arg"
                 fi
                 ;;
             --rpath=*)

@@ -459,9 +443,9 @@ parse_Wl() {
                 if [ -z "$arg" ]; then
                     shift; continue
                 elif system_dir "$arg"; then
-                    append system_rpath_dirs_list "$arg"
+                    append return_system_rpath_dirs_list "$arg"
                 else
-                    append rpath_dirs_list "$arg"
+                    append return_rpath_dirs_list "$arg"
                 fi
                 ;;
             -rpath|--rpath)

@@ -475,7 +459,7 @@ parse_Wl() {
                 return 1
                 ;;
             *)
-                append other_args_list "-Wl,$1"
+                append return_other_args_list "-Wl,$1"
                 ;;
         esac
     fi

@@ -483,177 +467,210 @@ parse_Wl() {
     done
 }
 
-while [ $# -ne 0 ]; do
-
-    # an RPATH to be added after the case statement.
-    rp=""
-
-    # Multiple consecutive spaces in the command line can
-    # result in blank arguments
-    if [ -z "$1" ]; then
-        shift
-        continue
-    fi
-
-    if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
-        # NOTE: the eval is required to allow `|` alternatives inside the variable
-        eval "\
-        case \"\$1\" in
-            $SPACK_COMPILER_FLAGS_KEEP)
-                append other_args_list \"\$1\"
-                shift
-                continue
-                ;;
-        esac
-        "
-    fi
-    # the replace list is a space-separated list of pipe-separated pairs,
-    # the first in each pair is the original prefix to be matched, the
-    # second is the replacement prefix
-    if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
-        for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
-            before=${rep%|*}
-            after=${rep#*|}
-            eval "\
-            stripped=\"\${1##$before}\"
-            "
-            if [ "$stripped" = "$1" ] ; then
-                continue
-            fi
-
-            replaced="$after$stripped"
-
-            # it matched, remove it
-            shift
-
-            if [ -z "$replaced" ] ; then
-                # completely removed, continue OUTER loop
-                continue 2
-            fi
-
-            # re-build argument list with replacement
-            set -- "$replaced" "$@"
-        done
-    fi
-
-    case "$1" in
-        -isystem*)
-            arg="${1#-isystem}"
-            isystem_was_used=true
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            if system_dir "$arg"; then
-                append isystem_system_include_dirs_list "$arg"
-            else
-                append isystem_include_dirs_list "$arg"
-            fi
-            ;;
-        -I*)
-            arg="${1#-I}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            if system_dir "$arg"; then
-                append system_include_dirs_list "$arg"
-            else
-                append include_dirs_list "$arg"
-            fi
-            ;;
-        -L*)
-            arg="${1#-L}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            if system_dir "$arg"; then
-                append system_lib_dirs_list "$arg"
-            else
-                append lib_dirs_list "$arg"
-            fi
-            ;;
-        -l*)
-            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
-            # and passed by ifx to the linker, which confuses it with a
-            # library. Filter it out.
-            # TODO: generalize filtering of args with an env var, so that
-            # TODO: we do not have to special case this here.
-            if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
-                && [ "$1" != "${1#-loopopt}" ]; then
-                shift
-                continue
-            fi
-            arg="${1#-l}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            append other_args_list "-l$arg"
-            ;;
-        -Wl,*)
-            IFS=,
-            if ! parse_Wl ${1#-Wl,}; then
-                append other_args_list "$1"
-            fi
-            unset IFS
-            ;;
-        -Xlinker)
-            shift
-            if [ $# -eq 0 ]; then
-                # -Xlinker without value: let the compiler error about it.
-                append other_args_list -Xlinker
-                xlinker_expect_rpath=no
-                break
-            elif [ "$xlinker_expect_rpath" = yes ]; then
-                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                if system_dir "$1"; then
-                    append system_rpath_dirs_list "$1"
-                else
-                    append rpath_dirs_list "$1"
-                fi
-                xlinker_expect_rpath=no
-            else
-                case "$1" in
-                    -rpath=*)
-                        arg="${1#-rpath=}"
-                        if system_dir "$arg"; then
-                            append system_rpath_dirs_list "$arg"
-                        else
-                            append rpath_dirs_list "$arg"
-                        fi
-                        ;;
-                    --rpath=*)
-                        arg="${1#--rpath=}"
-                        if system_dir "$arg"; then
-                            append system_rpath_dirs_list "$arg"
-                        else
-                            append rpath_dirs_list "$arg"
-                        fi
-                        ;;
-                    -rpath|--rpath)
-                        xlinker_expect_rpath=yes
-                        ;;
-                    "$dtags_to_strip")
-                        ;;
-                    *)
-                        append other_args_list -Xlinker
-                        append other_args_list "$1"
-                        ;;
-                esac
-            fi
-            ;;
-        "$dtags_to_strip")
-            ;;
-        *)
-            append other_args_list "$1"
-            ;;
-    esac
-    shift
-done
-
-# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
-# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
-# parsing.
-if [ "$xlinker_expect_rpath" = yes ]; then
-    append other_args_list -Xlinker
-    append other_args_list -rpath
-fi
-
-# Same, but for -Wl flags.
-if [ "$wl_expect_rpath" = yes ]; then
-    append other_args_list -Wl,-rpath
-fi
+categorize_arguments() {
+
+    unset IFS
+
+    return_other_args_list=""
+    return_isystem_was_used=""
+    return_isystem_system_include_dirs_list=""
+    return_isystem_include_dirs_list=""
+    return_system_include_dirs_list=""
+    return_include_dirs_list=""
+    return_system_lib_dirs_list=""
+    return_lib_dirs_list=""
+    return_system_rpath_dirs_list=""
+    return_rpath_dirs_list=""
+
+    # Global state for keeping track of -Wl,-rpath -Wl,/path
+    wl_expect_rpath=no
+
+    # Same, but for -Xlinker -rpath -Xlinker /path
+    xlinker_expect_rpath=no
+
+    while [ $# -ne 0 ]; do
+
+        # an RPATH to be added after the case statement.
+        rp=""
+
+        # Multiple consecutive spaces in the command line can
+        # result in blank arguments
+        if [ -z "$1" ]; then
+            shift
+            continue
+        fi
+
+        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
+            # NOTE: the eval is required to allow `|` alternatives inside the variable
+            eval "\
+            case \"\$1\" in
+                $SPACK_COMPILER_FLAGS_KEEP)
+                    append return_other_args_list \"\$1\"
+                    shift
+                    continue
+                    ;;
+            esac
+            "
+        fi
+        # the replace list is a space-separated list of pipe-separated pairs,
+        # the first in each pair is the original prefix to be matched, the
+        # second is the replacement prefix
+        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
+            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
+                before=${rep%|*}
+                after=${rep#*|}
+                eval "\
+                stripped=\"\${1##$before}\"
+                "
+                if [ "$stripped" = "$1" ] ; then
+                    continue
+                fi
+
+                replaced="$after$stripped"
+
+                # it matched, remove it
+                shift
+
+                if [ -z "$replaced" ] ; then
+                    # completely removed, continue OUTER loop
+                    continue 2
+                fi
+
+                # re-build argument list with replacement
+                set -- "$replaced" "$@"
+            done
+        fi
+
+        case "$1" in
+            -isystem*)
+                arg="${1#-isystem}"
+                return_isystem_was_used=true
+                if [ -z "$arg" ]; then shift; arg="$1"; fi
+                if system_dir "$arg"; then
+                    append return_isystem_system_include_dirs_list "$arg"
+                else
+                    append return_isystem_include_dirs_list "$arg"
+                fi
+                ;;
+            -I*)
+                arg="${1#-I}"
+                if [ -z "$arg" ]; then shift; arg="$1"; fi
+                if system_dir "$arg"; then
+                    append return_system_include_dirs_list "$arg"
+                else
+                    append return_include_dirs_list "$arg"
+                fi
+                ;;
+            -L*)
+                arg="${1#-L}"
+                if [ -z "$arg" ]; then shift; arg="$1"; fi
+                if system_dir "$arg"; then
+                    append return_system_lib_dirs_list "$arg"
+                else
+                    append return_lib_dirs_list "$arg"
+                fi
+                ;;
+            -l*)
+                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
+                # and passed by ifx to the linker, which confuses it with a
+                # library. Filter it out.
+                # TODO: generalize filtering of args with an env var, so that
+                # TODO: we do not have to special case this here.
+                if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
+                    && [ "$1" != "${1#-loopopt}" ]; then
+                    shift
+                    continue
+                fi
+                arg="${1#-l}"
+                if [ -z "$arg" ]; then shift; arg="$1"; fi
+                append return_other_args_list "-l$arg"
+                ;;
+            -Wl,*)
+                IFS=,
+                if ! parse_Wl ${1#-Wl,}; then
+                    append return_other_args_list "$1"
+                fi
+                unset IFS
+                ;;
+            -Xlinker)
+                shift
+                if [ $# -eq 0 ]; then
+                    # -Xlinker without value: let the compiler error about it.
+                    append return_other_args_list -Xlinker
+                    xlinker_expect_rpath=no
+                    break
+                elif [ "$xlinker_expect_rpath" = yes ]; then
+                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
+                    if system_dir "$1"; then
+                        append return_system_rpath_dirs_list "$1"
+                    else
+                        append return_rpath_dirs_list "$1"
+                    fi
+                    xlinker_expect_rpath=no
+                else
+                    case "$1" in
+                        -rpath=*)
+                            arg="${1#-rpath=}"
+                            if system_dir "$arg"; then
+                                append return_system_rpath_dirs_list "$arg"
+                            else
+                                append return_rpath_dirs_list "$arg"
+                            fi
+                            ;;
+                        --rpath=*)
+                            arg="${1#--rpath=}"
+                            if system_dir "$arg"; then
+                                append return_system_rpath_dirs_list "$arg"
+                            else
+                                append return_rpath_dirs_list "$arg"
+                            fi
+                            ;;
+                        -rpath|--rpath)
+                            xlinker_expect_rpath=yes
+                            ;;
+                        "$dtags_to_strip")
+                            ;;
+                        *)
+                            append return_other_args_list -Xlinker
+                            append return_other_args_list "$1"
+                            ;;
+                    esac
+                fi
+                ;;
+            "$dtags_to_strip")
+                ;;
+            *)
+                append return_other_args_list "$1"
+                ;;
+        esac
+        shift
+    done
+
+    # We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
+    # `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
+    # parsing.
+    if [ "$xlinker_expect_rpath" = yes ]; then
+        append return_other_args_list -Xlinker
+        append return_other_args_list -rpath
+    fi
+
+    # Same, but for -Wl flags.
+    if [ "$wl_expect_rpath" = yes ]; then
+        append return_other_args_list -Wl,-rpath
+    fi
+}
+
+categorize_arguments "$@"
+include_dirs_list="$return_include_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+rpath_dirs_list="$return_rpath_dirs_list"
+system_include_dirs_list="$return_system_include_dirs_list"
+system_lib_dirs_list="$return_system_lib_dirs_list"
+system_rpath_dirs_list="$return_system_rpath_dirs_list"
+isystem_was_used="$return_isystem_was_used"
+isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+isystem_include_dirs_list="$return_isystem_include_dirs_list"
+other_args_list="$return_other_args_list"

@@ -673,12 +690,14 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
     extend flags_list SPACK_DEBUG_FLAGS
 fi
 
+spack_flags_list=""
+
 # Fortran flags come before CPPFLAGS
 case "$mode" in
     cc|ccld)
         case $lang_flags in
             F)
-                extend flags_list SPACK_FFLAGS
+                extend spack_flags_list SPACK_FFLAGS
                 ;;
         esac
         ;;

@@ -687,7 +706,7 @@ esac
 # C preprocessor flags come before any C/CXX flags
 case "$mode" in
     cpp|as|cc|ccld)
-        extend flags_list SPACK_CPPFLAGS
+        extend spack_flags_list SPACK_CPPFLAGS
         ;;
 esac
 

@@ -697,10 +716,10 @@ case "$mode" in
     cc|ccld)
         case $lang_flags in
             C)
-                extend flags_list SPACK_CFLAGS
+                extend spack_flags_list SPACK_CFLAGS
                 ;;
             CXX)
-                extend flags_list SPACK_CXXFLAGS
+                extend spack_flags_list SPACK_CXXFLAGS
                 ;;
         esac
 

@@ -712,10 +731,25 @@ esac
 # Linker flags
 case "$mode" in
     ld|ccld)
-        extend flags_list SPACK_LDFLAGS
+        extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac
 
+IFS="$lsep"
+    categorize_arguments $spack_flags_list
+unset IFS
+spack_flags_include_dirs_list="$return_include_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_isystem_was_used="$return_isystem_was_used"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+spack_flags_other_args_list="$return_other_args_list"
+
 # On macOS insert headerpad_max_install_names linker flag
 if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
     if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then

@@ -741,6 +775,8 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
     extend lib_dirs_list SPACK_LINK_DIRS
 fi
 
+libs_list=""
+
 # add RPATHs if we're in in any linking mode
 case "$mode" in
     ld|ccld)

@@ -769,12 +805,16 @@ args_list="$flags_list"
 
 # Insert include directories just prior to any system include directories
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
+extend args_list spack_flags_include_dirs_list "-I"
 extend args_list include_dirs_list "-I"
+extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"
 
 case "$mode" in
     cpp|cc|as|ccld)
-        if [ "$isystem_was_used" = "true" ]; then
+        if [ "$spack_flags_isystem_was_used" = "true" ]; then
+            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
+        elif [ "$isystem_was_used" = "true" ]; then
             extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
         else
             extend args_list SPACK_INCLUDE_DIRS "-I"

@@ -782,11 +822,15 @@ case "$mode" in
         ;;
 esac
 
+extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I
+extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
 
 # Library search paths
+extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"
+extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"
 
 # RPATHs arguments

@@ -795,20 +839,25 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;
     ld)
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
 esac
 
 # Other arguments from the input command
 extend args_list other_args_list
+extend args_list spack_flags_other_args_list
 
 # Inject SPACK_LDLIBS, if supplied
 extend args_list libs_list "-l"

@@ -864,3 +913,4 @@ fi
 # Execute the full command, preserving spaces with IFS set
 # to the alarm bell separator.
 IFS="$lsep"; exec $full_command_list
+
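One design choice worth noting in the ``categorize_arguments`` refactor
above: POSIX sh functions cannot return arrays, so the function "returns"
its many lists through global ``return_*`` variables that each caller
immediately copies into differently prefixed names. A stripped-down sketch
of that convention, simplified to space-separated lists rather than the
wrapper's bell-separated ones:

.. code-block:: sh

   # A function "returns" several values by assigning well-known
   # globals; callers copy them out before the next invocation
   # overwrites them.
   categorize() {
       return_include_dirs_list=""
       return_other_args_list=""
       for arg in "$@"; do
           case "$arg" in
               -I*) return_include_dirs_list="$return_include_dirs_list ${arg#-I}" ;;
               *)   return_other_args_list="$return_other_args_list $arg" ;;
           esac
       done
   }

   categorize -I/usr/local/include -O2 main.c
   include_dirs_list="$return_include_dirs_list"
   other_args_list="$return_other_args_list"

   categorize -I/opt/include extra.c
   spack_flags_include_dirs_list="$return_include_dirs_list"
   spack_flags_other_args_list="$return_other_args_list"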
lib/spack/external/__init__.py (vendored, 2 changes)

@@ -18,7 +18,7 @@
 
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit 4b1f21802a23b536bbcce73d3c631a566b20e8bd)
+* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
 
 astunparse
 ----------------

@@ -2803,7 +2803,7 @@
             "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
           },
           {
-            "versions": "10.2",
+            "versions": "10.2:10.2.99",
             "flags" : "-mcpu=zeus"
           },
           {
lib/spack/external/ctest_log_parser.py (vendored, 7 changes)

@@ -65,9 +65,6 @@
 up to date with CTest, just make sure the ``*_matches`` and
 ``*_exceptions`` lists are kept up to date with CTest's build handler.
 """
-from __future__ import print_function
-from __future__ import division
-
 import re
 import math
 import multiprocessing

@@ -211,7 +208,7 @@
 ]
 
 
-class LogEvent(object):
+class LogEvent:
     """Class representing interesting events (e.g., errors) in a build log."""
     def __init__(self, text, line_no,
                  source_file=None, source_line_no=None,

@@ -348,7 +345,7 @@ def _parse_unpack(args):
     return _parse(*args)
 
 
-class CTestLogParser(object):
+class CTestLogParser:
     """Log file parser that extracts errors and warnings."""
     def __init__(self, profile=False):
         # whether to record timing information
@@ -3,33 +3,42 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
+import abc
 import argparse
-import errno
 import io
 import re
 import sys
+from argparse import ArgumentParser
+from typing import IO, Optional, Sequence, Tuple
 
 
-class Command(object):
+class Command:
     """Parsed representation of a command from argparse.
 
-    This is a single command from an argparse parser. ``ArgparseWriter``
-    creates these and returns them from ``parse()``, and it passes one of
-    these to each call to ``format()`` so that we can take an action for
-    a single command.
-
-    Parts of a Command:
-      - prog: command name (str)
-      - description: command description (str)
-      - usage: command usage (str)
-      - positionals: list of positional arguments (list)
-      - optionals: list of optional arguments (list)
-      - subcommands: list of subcommand parsers (list)
+    This is a single command from an argparse parser. ``ArgparseWriter`` creates these and returns
+    them from ``parse()``, and it passes one of these to each call to ``format()`` so that we can
+    take an action for a single command.
     """
 
-    def __init__(self, prog, description, usage, positionals, optionals, subcommands):
+    def __init__(
+        self,
+        prog: str,
+        description: Optional[str],
+        usage: str,
+        positionals: Sequence[Tuple[str, str]],
+        optionals: Sequence[Tuple[Sequence[str], str, str]],
+        subcommands: Sequence[Tuple[ArgumentParser, str]],
+    ) -> None:
+        """Initialize a new Command instance.
+
+        Args:
+            prog: Program name.
+            description: Command description.
+            usage: Command usage.
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.
+        """
         self.prog = prog
         self.description = description
         self.usage = usage

@@ -38,35 +47,34 @@ def __init__(self, prog, description, usage, positionals, optionals, subcommands
     self.subcommands = subcommands
 
 
-# NOTE: The only reason we subclass argparse.HelpFormatter is to get access
-# to self._expand_help(), ArgparseWriter is not intended to be used as a
-# formatter_class.
-class ArgparseWriter(argparse.HelpFormatter):
-    """Analyzes an argparse ArgumentParser for easy generation of help."""
+# NOTE: The only reason we subclass argparse.HelpFormatter is to get access to self._expand_help(),
+# ArgparseWriter is not intended to be used as a formatter_class.
+class ArgparseWriter(argparse.HelpFormatter, abc.ABC):
+    """Analyze an argparse ArgumentParser for easy generation of help."""
 
-    def __init__(self, prog, out=None, aliases=False):
-        """Initializes a new ArgparseWriter instance.
+    def __init__(self, prog: str, out: IO = sys.stdout, aliases: bool = False) -> None:
+        """Initialize a new ArgparseWriter instance.
 
-        Parameters:
-            prog (str): the program name
-            out (file object): the file to write to (default sys.stdout)
-            aliases (bool): whether or not to include subparsers for aliases
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
         """
-        super(ArgparseWriter, self).__init__(prog)
+        super().__init__(prog)
         self.level = 0
         self.prog = prog
-        self.out = sys.stdout if out is None else out
+        self.out = out
         self.aliases = aliases
 
-    def parse(self, parser, prog):
-        """Parses the parser object and returns the relavent components.
+    def parse(self, parser: ArgumentParser, prog: str) -> Command:
+        """Parse the parser object and return the relavent components.
 
-        Parameters:
-            parser (argparse.ArgumentParser): the parser
-            prog (str): the command name
+        Args:
+            parser: Command parser.
+            prog: Program name.
 
         Returns:
-            (Command) information about the command from the parser
+            Information about the command from the parser.
         """
         self.parser = parser

@@ -80,8 +88,7 @@ def parse(self, parser, prog):
         groups = parser._mutually_exclusive_groups
         usage = fmt._format_usage(None, actions, groups, "").strip()
 
-        # Go through actions and split them into optionals, positionals,
-        # and subcommands
+        # Go through actions and split them into optionals, positionals, and subcommands
         optionals = []
         positionals = []
         subcommands = []

@@ -98,7 +105,7 @@ def parse(self, parser, prog):
                     subcommands.append((subparser, subaction.dest))
 
                     # Look for aliases of the form 'name (alias, ...)'
-                    if self.aliases:
+                    if self.aliases and isinstance(subaction.metavar, str):
                         match = re.match(r"(.*) \((.*)\)", subaction.metavar)
                         if match:
                             aliases = match.group(2).split(", ")

@@ -113,28 +120,26 @@ def parse(self, parser, prog):
 
         return Command(prog, description, usage, positionals, optionals, subcommands)
 
-    def format(self, cmd):
-        """Returns the string representation of a single node in the
-        parser tree.
+    @abc.abstractmethod
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
 
-        Override this in subclasses to define how each subcommand
-        should be displayed.
+        Override this in subclasses to define how each subcommand should be displayed.
 
-        Parameters:
-            (Command): parsed information about a command or subcommand
+        Args:
+            cmd: Parsed information about a command or subcommand.
 
         Returns:
-            str: the string representation of this subcommand
+            String representation of this subcommand.
         """
-        raise NotImplementedError
 
-    def _write(self, parser, prog, level=0):
-        """Recursively writes a parser.
+    def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
+        """Recursively write a parser.
 
-        Parameters:
-            parser (argparse.ArgumentParser): the parser
-            prog (str): the command name
-            level (int): the current level
+        Args:
+            parser: Command parser.
+            prog: Program name.
+            level: Current level.
         """
         self.level = level

@@ -144,19 +149,17 @@ def _write(self, parser, prog, level=0):
         for subparser, prog in cmd.subcommands:
             self._write(subparser, prog, level=level + 1)
 
-    def write(self, parser):
+    def write(self, parser: ArgumentParser) -> None:
         """Write out details about an ArgumentParser.
 
         Args:
-            parser (argparse.ArgumentParser): the parser
+            parser: Command parser.
         """
         try:
             self._write(parser, self.prog)
-        except IOError as e:
+        except BrokenPipeError:
             # Swallow pipe errors
-            # Raises IOError in Python 2 and BrokenPipeError in Python 3
-            if e.errno != errno.EPIPE:
-                raise
+            pass
 
 
 _rst_levels = ["=", "-", "^", "~", ":", "`"]

@@ -165,21 +168,33 @@ def write(self, parser):
 class ArgparseRstWriter(ArgparseWriter):
     """Write argparse output as rst sections."""
 
-    def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
-        """Create a new ArgparseRstWriter.
+    def __init__(
+        self,
+        prog: str,
+        out: IO = sys.stdout,
+        aliases: bool = False,
+        rst_levels: Sequence[str] = _rst_levels,
+    ) -> None:
+        """Initialize a new ArgparseRstWriter instance.
 
-        Parameters:
-            prog (str): program name
-            out (file object): file to write to
-            aliases (bool): whether or not to include subparsers for aliases
-            rst_levels (list of str): list of characters
-                for rst section headings
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
+            rst_levels: List of characters for rst section headings.
         """
-        out = sys.stdout if out is None else out
-        super(ArgparseRstWriter, self).__init__(prog, out, aliases)
+        super().__init__(prog, out, aliases)
         self.rst_levels = rst_levels
 
-    def format(self, cmd):
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
+
+        Args:
+            cmd: Parsed information about a command or subcommand.
+
+        Returns:
+            String representation of a node.
+        """
         string = io.StringIO()
         string.write(self.begin_command(cmd.prog))
 

@@ -205,7 +220,15 @@ def format(self, cmd):
 
         return string.getvalue()
 
-    def begin_command(self, prog):
+    def begin_command(self, prog: str) -> str:
+        """Text to print before a command.
+
+        Args:
+            prog: Program name.
+
+        Returns:
+            Text before a command.
+        """
         return """
 ----
 

@@ -218,10 +241,26 @@ def begin_command(self, prog):
             prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
         )
 
-    def description(self, description):
+    def description(self, description: str) -> str:
+        """Description of a command.
+
+        Args:
+            description: Command description.
+
+        Returns:
+            Description of a command.
+        """
         return description + "\n\n"
 
-    def usage(self, usage):
+    def usage(self, usage: str) -> str:
+        """Example usage of a command.
+
+        Args:
+            usage: Command usage.
+
+        Returns:
+            Usage of a command.
+        """
         return """\
 .. code-block:: console
 

@@ -231,10 +270,24 @@ def usage(self, usage):
             usage
         )
 
-    def begin_positionals(self):
+    def begin_positionals(self) -> str:
+        """Text to print before positional arguments.
+
+        Returns:
+            Positional arguments header.
+        """
         return "\n**Positional arguments**\n\n"
 
-    def positional(self, name, help):
+    def positional(self, name: str, help: str) -> str:
+        """Description of a positional argument.
+
+        Args:
+            name: Argument name.
+            help: Help text.
+
+        Returns:
+            Positional argument description.
+        """
         return """\
 {0}
   {1}

@@ -243,13 +296,32 @@ def positional(self, name, help):
             name, help
         )
 
-    def end_positionals(self):
+    def end_positionals(self) -> str:
+        """Text to print after positional arguments.
+
+        Returns:
+            Positional arguments footer.
+        """
         return ""
 
-    def begin_optionals(self):
+    def begin_optionals(self) -> str:
+        """Text to print before optional arguments.
+
+        Returns:
+            Optional arguments header.
+        """
         return "\n**Optional arguments**\n\n"
 
-    def optional(self, opts, help):
+    def optional(self, opts: str, help: str) -> str:
+        """Description of an optional argument.
+
+        Args:
+            opts: Optional argument.
+            help: Help text.
+
+        Returns:
+            Optional argument description.
+        """
         return """\
 ``{0}``
   {1}

@@ -258,10 +330,23 @@ def optional(self, opts, help):
             opts, help
         )
 
-    def end_optionals(self):
+    def end_optionals(self) -> str:
+        """Text to print after optional arguments.
+
+        Returns:
+            Optional arguments footer.
+        """
         return ""
 
-    def begin_subcommands(self, subcommands):
+    def begin_subcommands(self, subcommands: Sequence[Tuple[ArgumentParser, str]]) -> str:
+        """Table with links to other subcommands.
+
+        Arguments:
+            subcommands: List of subcommands.
+
+        Returns:
+            Subcommand linking text.
+        """
         string = """
 **Subcommands**
 

@@ -280,29 +365,25 @@ def begin_subcommands(self, subcommands):
 class ArgparseCompletionWriter(ArgparseWriter):
     """Write argparse output as shell programmable tab completion functions."""
 
-    def format(self, cmd):
-        """Returns the string representation of a single node in the
-        parser tree.
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
 
-        Override this in subclasses to define how each subcommand
-        should be displayed.
-
-        Parameters:
-            (Command): parsed information about a command or subcommand
+        Args:
+            cmd: Parsed information about a command or subcommand.
 
         Returns:
-            str: the string representation of this subcommand
+            String representation of this subcommand.
         """
 
         assert cmd.optionals  # we should always at least have -h, --help
         assert not (cmd.positionals and cmd.subcommands)  # one or the other
 
         # We only care about the arguments/flags, not the help messages
-        positionals = []
+        positionals: Tuple[str, ...] = ()
         if cmd.positionals:
             positionals, _ = zip(*cmd.positionals)
         optionals, _, _ = zip(*cmd.optionals)
-        subcommands = []
+        subcommands: Tuple[str, ...] = ()
         if cmd.subcommands:
             _, subcommands = zip(*cmd.subcommands)
 

@@ -315,71 +396,73 @@ def format(self, cmd):
             + self.end_function(cmd.prog)
         )
 
-    def start_function(self, prog):
-        """Returns the syntax needed to begin a function definition.
+    def start_function(self, prog: str) -> str:
+        """Return the syntax needed to begin a function definition.
 
-        Parameters:
-            prog (str): the command name
+        Args:
+            prog: Program name.
 
         Returns:
-            str: the function definition beginning
+            Function definition beginning.
         """
         name = prog.replace("-", "_").replace(" ", "_")
         return "\n_{0}() {{".format(name)
 
-    def end_function(self, prog=None):
-        """Returns the syntax needed to end a function definition.
+    def end_function(self, prog: str) -> str:
+        """Return the syntax needed to end a function definition.
 
-        Parameters:
-            prog (str or None): the command name
+        Args:
+            prog: Program name
 
        Returns:
-            str: the function definition ending
+            Function definition ending.
         """
         return "}\n"
 
-    def body(self, positionals, optionals, subcommands):
-        """Returns the body of the function.
+    def body(
+        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
+    ) -> str:
+        """Return the body of the function.
 
-        Parameters:
-            positionals (list): list of positional arguments
-            optionals (list): list of optional arguments
-            subcommands (list): list of subcommand parsers
+        Args:
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.
 
         Returns:
-            str: the function body
+            Function body.
         """
         return ""
 
-    def positionals(self, positionals):
-        """Returns the syntax for reporting positional arguments.
+    def positionals(self, positionals: Sequence[str]) -> str:
+        """Return the syntax for reporting positional arguments.
 
-        Parameters:
-            positionals (list): list of positional arguments
+        Args:
+            positionals: List of positional arguments.
 
         Returns:
-            str: the syntax for positional arguments
+            Syntax for positional arguments.
         """
         return ""
 
-    def optionals(self, optionals):
-        """Returns the syntax for reporting optional flags.
+    def optionals(self, optionals: Sequence[str]) -> str:
+        """Return the syntax for reporting optional flags.
 
-        Parameters:
-            optionals (list): list of optional arguments
+        Args:
+            optionals: List of optional arguments.
 
         Returns:
-            str: the syntax for optional flags
+            Syntax for optional flags.
         """
         return ""
 
-    def subcommands(self, subcommands):
-        """Returns the syntax for reporting subcommands.
+    def subcommands(self, subcommands: Sequence[str]) -> str:
+        """Return the syntax for reporting subcommands.
 
-        Parameters:
-            subcommands (list): list of subcommand parsers
+        Args:
+            subcommands: List of subcommand parsers.
 
         Returns:
-            str: the syntax for subcommand parsers
+            Syntax for subcommand parsers
         """
         return ""
|
|||||||
os.remove(backup_filename)
|
os.remove(backup_filename)
|
||||||
|
|
||||||
|
|
||||||
class FileFilter(object):
|
class FileFilter:
|
||||||
"""Convenience class for calling ``filter_file`` a lot."""
|
"""Convenience class for calling ``filter_file`` a lot."""
|
||||||
|
|
||||||
def __init__(self, *filenames):
|
def __init__(self, *filenames):
|
||||||
@@ -610,6 +610,8 @@ def chgrp(path, group, follow_symlinks=True):
|
|||||||
gid = grp.getgrnam(group).gr_gid
|
gid = grp.getgrnam(group).gr_gid
|
||||||
else:
|
else:
|
||||||
gid = group
|
gid = group
|
||||||
|
if os.stat(path).st_gid == gid:
|
||||||
|
return
|
||||||
if follow_symlinks:
|
if follow_symlinks:
|
||||||
os.chown(path, -1, gid)
|
os.chown(path, -1, gid)
|
||||||
else:
|
else:
|
||||||
@@ -1336,7 +1338,7 @@ def lexists_islink_isdir(path):
|
|||||||
return True, is_link, is_dir
|
return True, is_link, is_dir
|
||||||
|
|
||||||
|
|
||||||
class BaseDirectoryVisitor(object):
|
class BaseDirectoryVisitor:
|
||||||
"""Base class and interface for :py:func:`visit_directory_tree`."""
|
"""Base class and interface for :py:func:`visit_directory_tree`."""
|
||||||
|
|
||||||
def visit_file(self, root, rel_path, depth):
|
def visit_file(self, root, rel_path, depth):
|
||||||
@@ -1890,7 +1892,7 @@ class HeaderList(FileList):
|
|||||||
include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")
|
include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")
|
||||||
|
|
||||||
def __init__(self, files):
|
def __init__(self, files):
|
||||||
super(HeaderList, self).__init__(files)
|
super().__init__(files)
|
||||||
|
|
||||||
self._macro_definitions = []
|
self._macro_definitions = []
|
||||||
self._directories = None
|
self._directories = None
|
||||||
@@ -1916,7 +1918,7 @@ def _default_directories(self):
|
|||||||
"""Default computation of directories based on the list of
|
"""Default computation of directories based on the list of
|
||||||
header files.
|
header files.
|
||||||
"""
|
"""
|
||||||
dir_list = super(HeaderList, self).directories
|
dir_list = super().directories
|
||||||
values = []
|
values = []
|
||||||
for d in dir_list:
|
for d in dir_list:
|
||||||
# If the path contains a subdirectory named 'include' then stop
|
# If the path contains a subdirectory named 'include' then stop
|
||||||
@@ -2352,7 +2354,7 @@ def find_all_libraries(root, recursive=False):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class WindowsSimulatedRPath(object):
|
class WindowsSimulatedRPath:
|
||||||
"""Class representing Windows filesystem rpath analog
|
"""Class representing Windows filesystem rpath analog
|
||||||
|
|
||||||
One instance of this class is associated with a package (only on Windows)
|
One instance of this class is associated with a package (only on Windows)
|
||||||
|
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import division
-
 import collections.abc
 import contextlib
 import functools
@@ -768,10 +766,10 @@ def pretty_seconds(seconds):

 class RequiredAttributeError(ValueError):
     def __init__(self, message):
-        super(RequiredAttributeError, self).__init__(message)
+        super().__init__(message)


-class ObjectWrapper(object):
+class ObjectWrapper:
     """Base class that wraps an object. Derived classes can add new behavior
     while staying undercover.

@@ -798,7 +796,7 @@ def __init__(self, wrapped_object):
         self.__dict__ = wrapped_object.__dict__


-class Singleton(object):
+class Singleton:
     """Simple wrapper for lazily initialized singleton objects."""

     def __init__(self, factory):
@@ -845,7 +843,7 @@ def __repr__(self):
         return repr(self.instance)


-class LazyReference(object):
+class LazyReference:
     """Lazily evaluated reference to part of a singleton."""

     def __init__(self, ref_function):
@@ -943,7 +941,7 @@ def _wrapper(args):
     return _wrapper


-class Devnull(object):
+class Devnull:
     """Null stream with less overhead than ``os.devnull``.

     See https://stackoverflow.com/a/2929954.
@@ -1060,7 +1058,7 @@ def __str__(self):
         return str(self.data)


-class GroupedExceptionHandler(object):
+class GroupedExceptionHandler:
     """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

     def __init__(self):
@@ -1091,7 +1089,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
         return "due to the following failures:\n{0}".format("\n".join(each_exception_message))


-class GroupedExceptionForwarder(object):
+class GroupedExceptionForwarder:
     """A contextmanager to capture exceptions and forward them to a
     GroupedExceptionHandler."""

@@ -1111,7 +1109,7 @@ def __exit__(self, exc_type, exc_value, tb):
         return True


-class classproperty(object):
+class classproperty:
     """Non-data descriptor to evaluate a class-level property. The function that performs
     the evaluation is injected at creation time and take an instance (could be None) and
     an owner (i.e. the class that originated the instance)
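With Python 2 support gone, the `from __future__ import ...` lines removed throughout this compare (`division`, `print_function`, `unicode_literals`) are no-ops on Python 3, where those behaviors are already the default:

```python
# On Python 3 these semantics hold without any __future__ imports:
print(7 / 2)     # 3.5  (true division is the default)
print(7 // 2)    # 3    (floor division must be explicit)
print(type(""))  # <class 'str'>  (str is unicode)
```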
@@ -5,8 +5,6 @@

 """LinkTree class for setting up trees of symbolic links."""

-from __future__ import print_function
-
 import filecmp
 import os
 import shutil
@@ -287,7 +285,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
         self.visit_file(root, rel_path, depth)


-class LinkTree(object):
+class LinkTree:
     """Class to create trees of symbolic links from a source directory.

     LinkTree objects are constructed with a source root. Their
@@ -432,12 +430,12 @@ class MergeConflictError(Exception):

 class ConflictingSpecsError(MergeConflictError):
     def __init__(self, spec_1, spec_2):
-        super(MergeConflictError, self).__init__(spec_1, spec_2)
+        super().__init__(spec_1, spec_2)


 class SingleMergeConflictError(MergeConflictError):
     def __init__(self, path):
-        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
+        super().__init__("Package merge blocked by file: %s" % path)


 class MergeConflictSummary(MergeConflictError):
@@ -452,4 +450,4 @@ def __init__(self, conflicts):
            msg += "\n    `{0}` and `{1}` both project to `{2}`".format(
                conflict.src_a, conflict.src_b, conflict.dst
            )
-        super(MergeConflictSummary, self).__init__(msg)
+        super().__init__(msg)
@@ -39,7 +39,7 @@
 true_fn = lambda: True


-class OpenFile(object):
+class OpenFile:
     """Record for keeping track of open lockfiles (with reference counting).

     There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -53,7 +53,7 @@ def __init__(self, fh):
         self.refs = 0


-class OpenFileTracker(object):
+class OpenFileTracker:
     """Track open lockfiles, to minimize number of open file descriptors.

     The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
     return " after {} and {}".format(pretty_seconds(wait_time), attempts)


-class LockType(object):
+class LockType:
     READ = 0
     WRITE = 1

@@ -192,7 +192,7 @@ def is_valid(op):
         return op == LockType.READ or op == LockType.WRITE


-class Lock(object):
+class Lock:
     """This is an implementation of a filesystem lock using Python's lockf.

     In Python, ``lockf`` actually calls ``fcntl``, so this should work with
@@ -681,7 +681,7 @@ def _status_msg(self, locktype, status):
         )


-class LockTransaction(object):
+class LockTransaction:
     """Simple nested transaction context manager that uses a file lock.

     Arguments:
@@ -770,7 +770,7 @@ class LockDowngradeError(LockError):

     def __init__(self, path):
         msg = "Cannot downgrade lock from write to read on file: %s" % path
-        super(LockDowngradeError, self).__init__(msg)
+        super().__init__(msg)


 class LockLimitError(LockError):
@@ -782,7 +782,7 @@ class LockTimeoutError(LockError):

     def __init__(self, lock_type, path, time, attempts):
         fmt = "Timed out waiting for a {} lock after {}.\n    Made {} {} on file: {}"
-        super(LockTimeoutError, self).__init__(
+        super().__init__(
             fmt.format(
                 lock_type,
                 pretty_seconds(time),
@@ -798,7 +798,7 @@ class LockUpgradeError(LockError):

     def __init__(self, path):
         msg = "Cannot upgrade lock from read to write on file: %s" % path
-        super(LockUpgradeError, self).__init__(msg)
+        super().__init__(msg)


 class LockPermissionError(LockError):
@@ -810,7 +810,7 @@ class LockROFileError(LockPermissionError):

     def __init__(self, path):
         msg = "Can't take write lock on read-only file: %s" % path
-        super(LockROFileError, self).__init__(msg)
+        super().__init__(msg)


 class CantCreateLockError(LockPermissionError):
@@ -819,4 +819,4 @@ class CantCreateLockError(LockPermissionError):
     def __init__(self, path):
         msg = "cannot create lock '%s': " % path
         msg += "file does not exist and location is not writable"
-        super(LockError, self).__init__(msg)
+        super().__init__(msg)
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import unicode_literals
-
 import contextlib
 import io
 import os
@@ -6,8 +6,6 @@
 """
 Routines for printing columnar output.  See ``colify()`` for more information.
 """
-from __future__ import division, unicode_literals
-
 import io
 import os
 import sys
@@ -59,8 +59,6 @@

 To output an @, use '@@'.  To output a } inside braces, use '}}'.
 """
-from __future__ import unicode_literals
-
 import re
 import sys
 from contextlib import contextmanager
@@ -70,7 +68,7 @@ class ColorParseError(Exception):
     """Raised when a color format fails to parse."""

     def __init__(self, message):
-        super(ColorParseError, self).__init__(message)
+        super().__init__(message)


 # Text styles for ansi codes
@@ -205,7 +203,7 @@ def color_when(value):
     set_color_when(old_value)


-class match_to_ansi(object):
+class match_to_ansi:
     def __init__(self, color=True, enclose=False):
         self.color = _color_when_value(color)
         self.enclose = enclose
@@ -321,7 +319,7 @@ def cescape(string):
     return string


-class ColorStream(object):
+class ColorStream:
     def __init__(self, stream, color=None):
         self._stream = stream
         self._color = color
@@ -5,8 +5,6 @@

 """Utility classes for logging the output of blocks of code.
 """
-from __future__ import unicode_literals
-
 import atexit
 import ctypes
 import errno
@@ -67,7 +65,7 @@ def _strip(line):
     return _escape.sub("", line)


-class keyboard_input(object):
+class keyboard_input:
     """Context manager to disable line editing and echoing.

     Use this with ``sys.stdin`` for keyboard input, e.g.::
@@ -244,7 +242,7 @@ def __exit__(self, exc_type, exception, traceback):
             signal.signal(signum, old_handler)


-class Unbuffered(object):
+class Unbuffered:
     """Wrapper for Python streams that forces them to be unbuffered.

     This is implemented by forcing a flush after each write.
@@ -289,7 +287,7 @@ def _file_descriptors_work(*streams):
         return False


-class FileWrapper(object):
+class FileWrapper:
     """Represents a file. Can be an open stream, a path to a file (not opened
     yet), or neither. When unwrapped, it returns an open file (or file-like)
     object.
@@ -331,7 +329,7 @@ def close(self):
             self.file.close()


-class MultiProcessFd(object):
+class MultiProcessFd:
     """Return an object which stores a file descriptor and can be passed as an
     argument to a function run with ``multiprocessing.Process``, such that
     the file descriptor is available in the subprocess."""
@@ -431,7 +429,7 @@ def log_output(*args, **kwargs):
     return nixlog(*args, **kwargs)


-class nixlog(object):
+class nixlog:
     """
     Under the hood, we spawn a daemon and set up a pipe between this
     process and the daemon. The daemon writes our output to both the
@@ -752,7 +750,7 @@ def close(self):
             os.close(self.saved_stream)


-class winlog(object):
+class winlog:
     """
     Similar to nixlog, with underlying
     functionality ported to support Windows.
@@ -13,8 +13,6 @@

 Note: The functionality in this module is unsupported on Windows
 """
-from __future__ import print_function
-
 import multiprocessing
 import os
 import re
@@ -36,7 +34,7 @@
     pass


-class ProcessController(object):
+class ProcessController:
     """Wrapper around some fundamental process control operations.

     This allows one process (the controller) to drive another (the
@@ -157,7 +155,7 @@ def wait_running(self):
         self.wait(lambda: "T" not in self.proc_status())


-class PseudoShell(object):
+class PseudoShell:
     """Sets up controller and minion processes with a PTY.

     You can create a ``PseudoShell`` if you want to test how some
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.20.0.dev0"
+__version__ = "0.21.0.dev0"
 spack_version = __version__
@@ -13,7 +13,7 @@
 from spack.util.executable import Executable, ProcessError


-class ABI(object):
+class ABI:
     """This class provides methods to test ABI compatibility between specs.
     The current implementation is rather rough and could be improved."""

@@ -60,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
 GROUPS = collections.defaultdict(list)


-class Error(object):
+class Error:
     """Information on an error reported in a test."""

     def __init__(self, summary, details):
@@ -80,14 +80,14 @@ def __init__(self, errors):
         else:
             err = errors[0]
             self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
-        super(FetchCacheError, self).__init__(self.message)
+        super().__init__(self.message)


 class ListMirrorSpecsError(spack.error.SpackError):
     """Raised when unable to retrieve list of specs from the mirror"""


-class BinaryCacheIndex(object):
+class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
     binary caches.
@@ -517,9 +517,7 @@ class NoOverwriteException(spack.error.SpackError):
     """Raised when a file would be overwritten"""

     def __init__(self, file_path):
-        super(NoOverwriteException, self).__init__(
-            f"Refusing to overwrite the following file: {file_path}"
-        )
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")


 class NoGpgException(spack.error.SpackError):
@@ -528,7 +526,7 @@ class NoGpgException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NoGpgException, self).__init__(msg)
+        super().__init__(msg)


 class NoKeyException(spack.error.SpackError):
@@ -537,7 +535,7 @@ class NoKeyException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NoKeyException, self).__init__(msg)
+        super().__init__(msg)


 class PickKeyException(spack.error.SpackError):
@@ -548,7 +546,7 @@ class PickKeyException(spack.error.SpackError):
     def __init__(self, keys):
         err_msg = "Multiple keys available for signing\n%s\n" % keys
         err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super(PickKeyException, self).__init__(err_msg)
+        super().__init__(err_msg)


 class NoVerifyException(spack.error.SpackError):
@@ -565,7 +563,7 @@ class NoChecksumException(spack.error.SpackError):
     """

     def __init__(self, path, size, contents, algorithm, expected, computed):
-        super(NoChecksumException, self).__init__(
+        super().__init__(
             f"{algorithm} checksum failed for {path}",
             f"Expected {expected} but got {computed}. "
             f"File size = {size} bytes. Contents = {contents!r}",
@@ -578,7 +576,7 @@ class NewLayoutException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super(NewLayoutException, self).__init__(msg)
+        super().__init__(msg)


 class UnsignedPackageException(spack.error.SpackError):
@@ -760,13 +758,12 @@ def hashes_to_prefixes(spec):
     }


-def get_buildinfo_dict(spec, rel=False):
+def get_buildinfo_dict(spec):
     """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

     return {
         "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
-        "relative_rpaths": rel,
         "buildpath": spack.store.layout.root,
         "spackprefix": spack.paths.prefix,
         "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
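With the `rel` flag gone, callers build the tarball metadata from the spec alone. A minimal sketch of the resulting shape, using hypothetical stand-in paths rather than Spack's real store objects (the full function also records relocation manifests not shown in the hunk):

```python
import os

def get_buildinfo_dict_sketch(prefix, layout_root, spack_prefix):
    # Mirrors the keys visible in the hunk above; "relative_rpaths" is gone.
    return {
        "sbang_install_path": os.path.join(spack_prefix, "bin", "sbang"),
        "buildpath": layout_root,
        "spackprefix": spack_prefix,
        "relative_prefix": os.path.relpath(prefix, layout_root),
    }

print(get_buildinfo_dict_sketch("/opt/spack/opt/zlib-1.2.13", "/opt/spack/opt", "/opt/spack"))
```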
@@ -1209,9 +1206,6 @@ class PushOptions(NamedTuple):
     #: Overwrite existing tarball/metadata files in buildcache
     force: bool = False

-    #: Whether to use relative RPATHs
-    relative: bool = False
-
     #: Allow absolute paths to package prefixes when creating a tarball
     allow_root: bool = False

@@ -1281,41 +1275,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, options: PushOptions):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

     pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-    workdir = os.path.join(stage_dir, pkg_dir)

-    # TODO: We generally don't want to mutate any files, but when using relative
-    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
-    # For now, we only make a full copy of the spec prefix when in relative mode.
-
-    if options.relative:
-        # tarfile is used because it preserves hardlink etc best.
-        binaries_dir = workdir
-        temp_tarfile_name = tarball_name(spec, ".tar")
-        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
-        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
-            tar.add(name="%s" % spec.prefix, arcname=".")
-        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
-            tar.extractall(workdir)
-        os.remove(temp_tarfile_path)
-    else:
-        binaries_dir = spec.prefix
+    binaries_dir = spec.prefix

     # create info for later relocation and create tar
-    buildinfo = get_buildinfo_dict(spec, options.relative)
+    buildinfo = get_buildinfo_dict(spec)

-    # optionally make the paths in the binaries relative to each other
-    # in the spack install tree before creating tarball
-    if options.relative:
-        make_package_relative(workdir, spec, buildinfo, options.allow_root)
-    elif not options.allow_root:
+    if not options.allow_root:
         ensure_package_relocatable(buildinfo, binaries_dir)

     _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

-    # remove copy of install directory
-    if options.relative:
-        shutil.rmtree(workdir)
-
     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
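The net effect of dropping relative-RPATH mode is that buildcache tarballs are now always created straight from the installed prefix: no temporary copy of the tree is made and no rpaths or links are rewritten beforehand. A rough sketch of the simplified flow, with illustrative names rather than Spack's internals:

```python
import tarfile

def create_buildcache_tarball(tarfile_path: str, prefix: str, arcname: str) -> None:
    # tarfile preserves hardlinks and symlinks, so the prefix can be
    # archived in place without mutating any installed files first.
    with tarfile.open(tarfile_path, "w:gz") as tar:
        tar.add(name=prefix, arcname=arcname)

# create_buildcache_tarball("zlib.tar.gz", "/opt/spack/opt/zlib-1.2.13", "zlib-1.2.13")
```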
@@ -1336,7 +1306,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, options: PushOptions):
     # This will be used to determine is the directory layout has changed.
     buildinfo = {}
     buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    buildinfo["relative_rpaths"] = options.relative
     spec_dict["buildinfo"] = buildinfo

     with open(specfile_path, "w") as outfile:
@@ -1596,35 +1565,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def make_package_relative(workdir, spec, buildinfo, allow_root):
-    """
-    Change paths in binaries to relative paths. Change absolute symlinks
-    to relative symlinks.
-    """
-    prefix = spec.prefix
-    old_layout_root = buildinfo["buildpath"]
-    orig_path_names = list()
-    cur_path_names = list()
-    for filename in buildinfo["relocate_binaries"]:
-        orig_path_names.append(os.path.join(prefix, filename))
-        cur_path_names.append(os.path.join(workdir, filename))
-
-    platform = spack.platforms.by_name(spec.platform)
-    if "macho" in platform.binary_formats:
-        relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    if "elf" in platform.binary_formats:
-        relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
-    orig_path_names = list()
-    cur_path_names = list()
-    for linkname in buildinfo.get("relocate_links", []):
-        orig_path_names.append(os.path.join(prefix, linkname))
-        cur_path_names.append(os.path.join(workdir, linkname))
-    relocate.make_link_relative(cur_path_names, orig_path_names)
-
-
 def ensure_package_relocatable(buildinfo, binaries_dir):
     """Check if package binaries are relocatable."""
     binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
@@ -2395,7 +2335,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     return download_buildcache_entry(files_to_fetch, mirror_url)


-class BinaryCacheQuery(object):
+class BinaryCacheQuery:
     """Callable object to query if a spec is in a binary cache"""

     def __init__(self, all_architectures):
@@ -175,12 +175,12 @@ def black_root_spec() -> str:

 def flake8_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-flake8")
+    return _root_spec("py-flake8@3.8.2:")


 def pytest_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-pytest")
+    return _root_spec("py-pytest@6.2.4:")


 def ensure_environment_dependencies() -> None:
@@ -148,7 +148,7 @@ class MakeExecutable(Executable):

     def __init__(self, name, jobs, **kwargs):
         supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super(MakeExecutable, self).__init__(name, **kwargs)
+        super().__init__(name, **kwargs)
         self.supports_jobserver = supports_jobserver
         self.jobs = jobs

@@ -175,7 +175,7 @@ def __call__(self, *args, **kwargs):
         if jobs_env_jobs is not None:
             kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

-        return super(MakeExecutable, self).__call__(*args, **kwargs)
+        return super().__call__(*args, **kwargs)


 def _on_cray():
@@ -589,7 +589,6 @@ def set_module_variables_for_package(pkg):

     # TODO: make these build deps that can be installed if not found.
     m.make = MakeExecutable("make", jobs)
-    m.gmake = MakeExecutable("gmake", jobs)
     m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
     # TODO: johnwparent: add package or builder support to define these build tools
     # for now there is no entrypoint for builders to define these on their
@@ -1216,6 +1215,9 @@ def child_fun():
     return child_result


+CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
+
+
 def get_package_context(traceback, context=3):
     """Return some context for an error message when the build fails.

@@ -1244,32 +1246,38 @@ def make_stack(tb, stack=None):

     stack = make_stack(traceback)

+    basenames = tuple(base.__name__ for base in CONTEXT_BASES)
     for tb in stack:
         frame = tb.tb_frame
         if "self" in frame.f_locals:
-            # Find the first proper subclass of PackageBase.
+            # Find the first proper subclass of the PackageBase or BaseBuilder, but
+            # don't provide context if the code is actually in the base classes.
             obj = frame.f_locals["self"]
-            if isinstance(obj, spack.package_base.PackageBase):
+            func = getattr(obj, tb.tb_frame.f_code.co_name, "")
+            if func:
+                typename, *_ = func.__qualname__.partition(".")
+
+            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
                 break
     else:
         return None

     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
-    lines = [
-        "{0}:{1:d}, in {2}:".format(
-            inspect.getfile(frame.f_code),
-            frame.f_lineno - 1,  # subtract 1 because f_lineno is 0-indexed
-            frame.f_code.co_name,
-        )
-    ]
+    filename = inspect.getfile(frame.f_code)
+    lineno = frame.f_lineno
+    if os.path.basename(filename) == "package.py":
+        # subtract 1 because we inject a magic import at the top of package files.
+        # TODO: get rid of the magic import.
+        lineno -= 1
+
+    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)

     # Calculate lineno of the error relative to the start of the function.
-    # Subtract 1 because f_lineno is 0-indexed.
-    fun_lineno = frame.f_lineno - start - 1
+    fun_lineno = lineno - start
     start_ctx = max(0, fun_lineno - context)
     sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
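The new check leans on `__qualname__` to find which class actually defines the running method, so frames executing inherited code from the base classes themselves are skipped. A self-contained illustration of that trick (the classes here are hypothetical):

```python
class Base:
    def run(self):
        return "base"

class Derived(Base):
    pass

class Overriding(Base):
    def run(self):
        return "override"

# __qualname__ names the class that *defines* the method, not the class
# of the instance it was fetched from.
print(Derived().run.__qualname__.partition(".")[0])     # Base
print(Overriding().run.__qualname__.partition(".")[0])  # Overriding
```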
@@ -1324,7 +1332,7 @@ class ChildError(InstallError):
     build_errors = [("spack.util.executable", "ProcessError")]

     def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
-        super(ChildError, self).__init__(msg)
+        super().__init__(msg)
         self.module = module
         self.name = classname
         self.traceback = traceback_string
@@ -1365,7 +1373,7 @@ def long_message(self):
             test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
             if os.path.isfile(test_log):
                 out.write("\nSee test log for details:\n")
-                out.write("  {0}n".format(test_log))
+                out.write("  {0}\n".format(test_log))

         return out.getvalue()
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections.abc
 import os
 from typing import Tuple

@@ -13,21 +14,24 @@
 from .cmake import CMakeBuilder, CMakePackage


-def cmake_cache_path(name, value, comment=""):
+def cmake_cache_path(name, value, comment="", force=False):
     """Generate a string for a cmake cache variable"""
-    return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
+    force_str = " FORCE" if force else ""
+    return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)


-def cmake_cache_string(name, value, comment=""):
+def cmake_cache_string(name, value, comment="", force=False):
     """Generate a string for a cmake cache variable"""
-    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
+    force_str = " FORCE" if force else ""
+    return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)


-def cmake_cache_option(name, boolean_value, comment=""):
+def cmake_cache_option(name, boolean_value, comment="", force=False):
     """Generate a string for a cmake configuration option"""

     value = "ON" if boolean_value else "OFF"
-    return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
+    force_str = " FORCE" if force else ""
+    return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)


 class CachedCMakeBuilder(CMakeBuilder):
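In CMake, the `FORCE` keyword makes `set(... CACHE ...)` overwrite a cache entry that already exists, which plain cache sets will not do. The helper is copied verbatim from the hunk above so the generated strings can be inspected standalone:

```python
def cmake_cache_option(name, boolean_value, comment="", force=False):
    value = "ON" if boolean_value else "OFF"
    force_str = " FORCE" if force else ""
    return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)

print(cmake_cache_option("BUILD_SHARED_LIBS", True), end="")
# set(BUILD_SHARED_LIBS ON CACHE BOOL "")
print(cmake_cache_option("BUILD_TESTING", False, force=True), end="")
# set(BUILD_TESTING OFF CACHE BOOL "" FORCE)
```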
@@ -63,6 +67,34 @@ def cache_name(self):
     def cache_path(self):
         return os.path.join(self.pkg.stage.source_path, self.cache_name)

+    # Implement a version of the define_from_variant for Cached packages
+    def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
+        """Return a Cached CMake field from the given variant's value.
+        See define_from_variant in lib/spack/spack/build_systems/cmake.py package
+        """
+
+        if variant is None:
+            variant = cmake_var.lower()
+
+        if variant not in self.pkg.variants:
+            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
+
+        if variant not in self.pkg.spec.variants:
+            return ""
+
+        value = self.pkg.spec.variants[variant].value
+        field = None
+        if isinstance(value, bool):
+            field = cmake_cache_option(cmake_var, value, comment)
+        else:
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
+                value = ";".join(str(v) for v in value)
+            else:
+                value = str(value)
+            field = cmake_cache_string(cmake_var, value, comment)
+
+        return field
+
     def initconfig_compiler_entries(self):
         # This will tell cmake to use the Spack compiler wrappers when run
         # through Spack, but use the underlying compiler when run outside of
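The dispatch in the new helper is simple: booleans become `CACHE BOOL` entries, sequences are joined with semicolons (CMake's list separator), and everything else is stringified. A self-contained mock of just that mapping, so it can be tried without Spack (names and values here are illustrative):

```python
import collections.abc

def cmake_cache_option(name, boolean_value, comment=""):
    return 'set({0} {1} CACHE BOOL "{2}")\n'.format(
        name, "ON" if boolean_value else "OFF", comment)

def cmake_cache_string(name, value, comment=""):
    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)

def define_cache_from_value(cmake_var, value, comment=""):
    # Same branching as define_cmake_cache_from_variant, minus the
    # variant lookups that need a real Spack package.
    if isinstance(value, bool):
        return cmake_cache_option(cmake_var, value, comment)
    if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
        value = ";".join(str(v) for v in value)
    return cmake_cache_string(cmake_var, str(value), comment)

print(define_cache_from_value("ENABLE_OPENMP", True), end="")
# set(ENABLE_OPENMP ON CACHE BOOL "")
print(define_cache_from_value("GPU_TARGETS", ("gfx906", "gfx908")), end="")
# set(GPU_TARGETS "gfx906;gfx908" CACHE STRING "")
```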
@@ -130,6 +162,17 @@ def initconfig_compiler_entries(self):
             libs_string = libs_format_string.format(lang)
             entries.append(cmake_cache_string(libs_string, libs_flags))

+        # Set the generator in the cached config
+        if self.spec.satisfies("generator=make"):
+            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
+        if self.spec.satisfies("generator=ninja"):
+            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
+            entries.append(
+                cmake_cache_string(
+                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
+                )
+            )
+
         return entries

     def initconfig_mpi_entries(self):
@@ -195,26 +238,58 @@ def initconfig_hardware_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]

+        # Provide standard CMake arguments for dependent CachedCMakePackages
         if spec.satisfies("^cuda"):
             entries.append("#------------------{0}".format("-" * 30))
             entries.append("# Cuda")
             entries.append("#------------------{0}\n".format("-" * 30))

             cudatoolkitdir = spec["cuda"].prefix
-            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
-            cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
-            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
+            entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
+            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
             entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
+            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
+            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
+
+            archs = spec.variants["cuda_arch"].value
+            if archs[0] != "none":
+                arch_str = ";".join(archs)
+                entries.append(
+                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
+                )
+
+        if "+rocm" in spec:
+            entries.append("#------------------{0}".format("-" * 30))
+            entries.append("# ROCm")
+            entries.append("#------------------{0}\n".format("-" * 30))
+
+            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
+            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+            entries.append(
+                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
+            )
+            archs = self.spec.variants["amdgpu_target"].value
+            if archs[0] != "none":
+                arch_str = ";".join(archs)
+                entries.append(
+                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
+                )
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))

         return entries

     def std_initconfig_entries(self):
+        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
+        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
             "#------------------{0}".format("-" * 60),
             "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
             "#------------------{0}\n".format("-" * 60),
+            cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
+            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]

     def initconfig_package_entries(self):
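For a CUDA build these entries end up in the generated initconfig cache file that CMake consumes via `-C`. A rough sketch of what that fragment might look like, assembled with the same helpers (the prefix and architectures below are hypothetical):

```python
def cmake_cache_path(name, value, comment=""):
    return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)

def cmake_cache_string(name, value, comment=""):
    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)

cuda_prefix = "/opt/spack/opt/cuda-12.0.0"  # hypothetical install prefix
entries = [
    cmake_cache_path("CUDAToolkit_ROOT", cuda_prefix),
    cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"),
    cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cuda_prefix),  # deprecated, kept for BLT
    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", ";".join(["80", "90"])),
]
print("".join(entries), end="")
```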
@@ -237,7 +312,7 @@ def initconfig(self, pkg, spec, prefix):

     @property
     def std_cmake_args(self):
-        args = super(CachedCMakeBuilder, self).std_cmake_args
+        args = super().std_cmake_args
         args.extend(["-C", self.cache_path])
         return args
@@ -5,6 +5,7 @@
 import collections.abc
 import inspect
 import os
+import pathlib
 import platform
 import re
 import sys
@@ -15,7 +16,6 @@
 import spack.build_environment
 import spack.builder
 import spack.package_base
-import spack.util.path
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when

@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
     args = [
         "-G",
         generator,
-        define("CMAKE_INSTALL_PREFIX", pkg.prefix),
+        define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
         define("CMAKE_BUILD_TYPE", build_type),
         define("BUILD_TESTING", pkg.run_tests),
     ]
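The `as_posix()` normalization matters mainly on Windows, where CMake treats backslashes in cache values as escape characters. A quick illustration (using `PureWindowsPath` so the demo runs on any platform; the prefix is hypothetical):

```python
import pathlib

prefix = r"C:\spack\opt\zlib-1.2.13"  # hypothetical Windows install prefix
print(pathlib.PureWindowsPath(prefix).as_posix())
# C:/spack/opt/zlib-1.2.13  -> safe to pass as CMAKE_INSTALL_PREFIX
```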
@@ -102,11 +102,10 @@ def cuda_flags(arch_list):

     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")
-
     depends_on("cuda@11.4:", when="cuda_arch=87")
-
     depends_on("cuda@11.8:", when="cuda_arch=89")
-    depends_on("cuda@11.8:", when="cuda_arch=90")
+
+    depends_on("cuda@12.0:", when="cuda_arch=90")

     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "+envmods" in self.spec:
+        if "~envmods" not in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
                     join_path(self.component_prefix, "env", "vars.sh")
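The inverted test changes behavior when the variant is absent from the spec entirely (for example on some external or legacy specs): the old `"+envmods" in spec` check was false there, while the new check keeps the default-on behavior and only skips when the user explicitly disabled it. A hedged truth table, using plain set membership as a stand-in for Spec matching:

```python
def should_modify_env(spec_variants: set) -> bool:
    # New behavior: modify the environment unless explicitly disabled.
    return "~envmods" not in spec_variants

print(should_modify_env({"+envmods"}))  # True
print(should_modify_env({"~envmods"}))  # False
print(should_modify_env(set()))         # True (old '+envmods in spec' test gave False here)
```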
@@ -175,7 +175,7 @@ def libs(self):
         return find_libraries("*", root=lib_path, shared=True, recursive=True)


-class IntelOneApiStaticLibraryList(object):
+class IntelOneApiStaticLibraryList:
     """Provides ld_flags when static linking is needed

     Oneapi puts static and dynamic libraries in the same directory, so
@@ -23,6 +23,7 @@
 import spack.store
 from spack.directives import build_system, depends_on, extends, maintainers
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
+from spack.install_test import test_part
 from spack.version import Version

 from ._checks import BaseBuilder, execute_install_time_tests
@@ -167,18 +168,65 @@ def remove_files_from_view(self, view, merge_map):

         view.remove_files(to_remove)

-    def test(self):
+    def test_imports(self):
         """Attempts to import modules of the installed package."""

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
+        python = inspect.getmodule(self).python.path
         for module in self.import_modules:
-            self.run_test(
-                inspect.getmodule(self).python.path,
-                ["-c", "import {0}".format(module)],
-                purpose="checking import of {0}".format(module),
+            with test_part(
+                self,
+                f"test_imports_{module}",
+                purpose=f"checking import of {module}",
                 work_dir="spack-test",
-            )
+            ):
+                python("-c", f"import {module}")
+
+    def update_external_dependencies(self, extendee_spec=None):
+        """
+        Ensure all external python packages have a python dependency
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if extendee_spec:
+                python = extendee_spec
+            elif "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = self.get_external_python_for_prefix()
+                if not python.concrete:
+                    repo = spack.repo.path.repo_for_pkg(python)
+                    python.namespace = repo.namespace
+
+                    # Ensure architecture information is present
+                    if not python.architecture:
+                        host_platform = spack.platforms.host()
+                        host_os = host_platform.operating_system("default_os")
+                        host_target = host_platform.target("default_target")
+                        python.architecture = spack.spec.ArchSpec(
+                            (str(host_platform), str(host_os), str(host_target))
+                        )
+                    else:
+                        if not python.architecture.platform:
+                            python.architecture.platform = spack.platforms.host()
+                        if not python.architecture.os:
+                            python.architecture.os = "default_os"
+                        if not python.architecture.target:
+                            python.architecture.target = archspec.cpu.host().family.name
+
+                    # Ensure compiler information is present
+                    if not python.compiler:
+                        python.compiler = self.spec.compiler
+
+                    python.external_path = self.spec.external_path
+                    python._mark_concrete()
+            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())


 class PythonPackage(PythonExtension):
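The move to `test_part` makes each import check a separately reported test part: a failure inside the `with` block marks that part failed and lets the remaining parts run. A self-contained mock of the pattern (this is an illustration of the idea, not Spack's implementation):

```python
import contextlib

results = {}

@contextlib.contextmanager
def test_part(name):
    # Record pass/fail per part and swallow the exception so the
    # next part still executes.
    try:
        yield
        results[name] = "PASSED"
    except Exception as exc:
        results[name] = f"FAILED ({exc})"

for module in ["json", "definitely_not_a_module"]:
    with test_part(f"test_imports_{module}"):
        __import__(module)

print(results)
# {'test_imports_json': 'PASSED',
#  'test_imports_definitely_not_a_module':
#      "FAILED (No module named 'definitely_not_a_module')"}
```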
@@ -225,51 +273,6 @@ def list_url(cls):
         name = cls.pypi.split("/")[0]
         return "https://pypi.org/simple/" + name + "/"

-    def update_external_dependencies(self, extendee_spec=None):
-        """
-        Ensure all external python packages have a python dependency
-
-        If another package in the DAG depends on python, we use that
-        python for the dependency of the external. If not, we assume
-        that the external PythonPackage is installed into the same
-        directory as the python it depends on.
-        """
-        # TODO: Include this in the solve, rather than instantiating post-concretization
-        if "python" not in self.spec:
-            if extendee_spec:
-                python = extendee_spec
-            elif "python" in self.spec.root:
-                python = self.spec.root["python"]
-            else:
-                python = self.get_external_python_for_prefix()
-                if not python.concrete:
-                    repo = spack.repo.path.repo_for_pkg(python)
-                    python.namespace = repo.namespace
-
-                    # Ensure architecture information is present
-                    if not python.architecture:
-                        host_platform = spack.platforms.host()
-                        host_os = host_platform.operating_system("default_os")
-                        host_target = host_platform.target("default_target")
-                        python.architecture = spack.spec.ArchSpec(
-                            (str(host_platform), str(host_os), str(host_target))
-                        )
-                    else:
-                        if not python.architecture.platform:
-                            python.architecture.platform = spack.platforms.host()
-                        if not python.architecture.os:
-                            python.architecture.os = "default_os"
-                        if not python.architecture.target:
-                            python.architecture.target = archspec.cpu.host().family.name
-
-                    # Ensure compiler information is present
-                    if not python.compiler:
-                        python.compiler = self.spec.compiler
-
-                    python.external_path = self.spec.external_path
-                    python._mark_concrete()
-            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
-
     def get_external_python_for_prefix(self):
         """
         For an external package that extends python, find the most likely spec for the python
@@ -7,7 +7,7 @@

 import llnl.util.lang as lang

-from spack.directives import extends, maintainers
+from spack.directives import extends

 from .generic import GenericBuilder, Package

@@ -71,8 +71,6 @@ class RPackage(Package):

     GenericBuilder = RBuilder

-    maintainers("glennpj")
-
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "RPackage"
@@ -10,6 +10,7 @@
 from llnl.util.filesystem import find, join_path, working_dir

 import spack.builder
+import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
@@ -30,8 +31,8 @@ class SIPPackage(spack.package_base.PackageBase):
     #: Name of private sip module to install alongside package
     sip_module = "sip"

-    #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
+    #: Callback names for install-time testing
+    install_time_test_callbacks = ["test_imports"]
     #: Legacy buildsystem attribute used to deserialize and install old specs
     legacy_buildsystem = "sip"

@@ -87,18 +88,20 @@ def python(self, *args, **kwargs):
|
|||||||
"""The python ``Executable``."""
|
"""The python ``Executable``."""
|
||||||
inspect.getmodule(self).python(*args, **kwargs)
|
inspect.getmodule(self).python(*args, **kwargs)
|
||||||
|
|
||||||
def test(self):
|
def test_imports(self):
|
||||||
"""Attempts to import modules of the installed package."""
|
"""Attempts to import modules of the installed package."""
|
||||||
|
|
||||||
# Make sure we are importing the installed modules,
|
# Make sure we are importing the installed modules,
|
||||||
# not the ones in the source directory
|
# not the ones in the source directory
|
||||||
|
python = inspect.getmodule(self).python
|
||||||
for module in self.import_modules:
|
for module in self.import_modules:
|
||||||
self.run_test(
|
with spack.install_test.test_part(
|
||||||
inspect.getmodule(self).python.path,
|
self,
|
||||||
["-c", "import {0}".format(module)],
|
"test_imports_{0}".format(module),
|
||||||
purpose="checking import of {0}".format(module),
|
purpose="checking import of {0}".format(module),
|
||||||
work_dir="spack-test",
|
work_dir="spack-test",
|
||||||
)
|
):
|
||||||
|
python("-c", "import {0}".format(module))
|
||||||
|
|
||||||
|
|
||||||
@spack.builder.builder("sip")
|
@spack.builder.builder("sip")
|
||||||
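The `test` → `test_imports` hunks above replace the old `run_test` helper with the newer `spack.install_test.test_part` context manager. For reference, a minimal sketch of the assembled method after this change, taken directly from the new side of the diff (the surrounding `SIPPackage` class, `inspect` import, and `import_modules` attribute are assumed):

# Sketch: the new stand-alone test method, assembled from the hunk above.
def test_imports(self):
    """Attempts to import modules of the installed package."""
    # Use the python from the package's build module, so we import the
    # installed modules rather than the ones in the source directory.
    python = inspect.getmodule(self).python
    for module in self.import_modules:
        # test_part opens a named sub-test; a failure inside the with-block
        # is recorded for that part instead of aborting the whole test run.
        with spack.install_test.test_part(
            self,
            "test_imports_{0}".format(module),
            purpose="checking import of {0}".format(module),
            work_dir="spack-test",
        ):
            python("-c", "import {0}".format(module))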
@@ -63,7 +63,7 @@ def create(pkg):
 return _BUILDERS[id(pkg)]


-class _PhaseAdapter(object):
+class _PhaseAdapter:
 def __init__(self, builder, phase_fn):
 self.builder = builder
 self.phase_fn = phase_fn
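This hunk, and several like it below, are pure Python 3 modernization: the explicit `object` base class and the two-argument `super(Class, self)` form are redundant on Python 3. An illustrative before/after (not copied from the diff; the class body here is simplified):

# Before (Python 2 compatible)
class MirrorCache(object):
    def __init__(self, root):
        super(MirrorCache, self).__init__()
        self.root = root

# After (Python 3 only): zero-argument super() resolves the same MRO
class MirrorCache:
    def __init__(self, root):
        super().__init__()
        self.root = root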
@@ -115,7 +115,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
 # package. The semantic should be the same as the method in the base builder were still
 # present in the base class of the package.

-class _ForwardToBaseBuilder(object):
+class _ForwardToBaseBuilder:
 def __init__(self, wrapped_pkg_object, root_builder):
 self.wrapped_package_object = wrapped_pkg_object
 self.root_builder = root_builder
@@ -188,7 +188,7 @@ def __init__(self, pkg):
 # Attribute containing the package wrapped in dispatcher with a `__getattr__`
 # method that will forward certain calls to the default builder.
 self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
-super(Adapter, self).__init__(pkg)
+super().__init__(pkg)

 # These two methods don't follow the (self, spec, prefix) signature of phases nor
 # the (self) signature of methods, so they are added explicitly to avoid using a
@@ -388,7 +388,7 @@ def __new__(mcs, name, bases, attr_dict):
 return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)


-class InstallationPhase(object):
+class InstallationPhase:
 """Manages a single phase of the installation.

 This descriptor stores at creation time the name of the method it should
@@ -530,9 +530,9 @@ def setup_build_environment(self, env):
 modifications to be applied when the package is built. Package authors
 can call methods on it to alter the build environment.
 """
-if not hasattr(super(Builder, self), "setup_build_environment"):
+if not hasattr(super(), "setup_build_environment"):
 return
-super(Builder, self).setup_build_environment(env)
+super().setup_build_environment(env)

 def setup_dependent_build_environment(self, env, dependent_spec):
 """Sets up the build environment of packages that depend on this one.
@@ -563,9 +563,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
 the dependent's state. Note that *this* package's spec is
 available as ``self.spec``
 """
-if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
+if not hasattr(super(), "setup_dependent_build_environment"):
 return
-super(Builder, self).setup_dependent_build_environment(env, dependent_spec)
+super().setup_dependent_build_environment(env, dependent_spec)

 def __getitem__(self, idx):
 key = self.phases[idx]
@@ -58,7 +58,7 @@ def _fetch_cache():
 return spack.fetch_strategy.FsCache(path)


-class MirrorCache(object):
+class MirrorCache:
 def __init__(self, root, skip_unstable_versions):
 self.root = os.path.abspath(root)
 self.skip_unstable_versions = skip_unstable_versions
@@ -28,7 +28,6 @@

 import spack
 import spack.binary_distribution as bindist
-import spack.compilers as compilers
 import spack.config as cfg
 import spack.environment as ev
 import spack.main
@@ -58,7 +57,7 @@
 PushResult = namedtuple("PushResult", "success url")


-class TemporaryDirectory(object):
+class TemporaryDirectory:
 def __init__(self):
 self.temporary_directory = tempfile.mkdtemp()

@@ -70,17 +69,10 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
 return False


-def _is_main_phase(phase_name):
-return True if phase_name == "specs" else False
-
-
-def get_job_name(phase, strip_compiler, spec, osarch, build_group):
+def get_job_name(spec, osarch, build_group):
 """Given the necessary parts, format the gitlab job name

 Arguments:
-phase (str): Either 'specs' for the main phase, or the name of a
-bootstrapping phase
-strip_compiler (bool): Should compiler be stripped from job name
 spec (spack.spec.Spec): Spec job will build
 osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
 but sphinx doesn't recognize the type and fails).
@@ -93,12 +85,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
 format_str = ""
 format_args = []

-if phase:
-format_str += "({{{0}}})".format(item_idx)
-format_args.append(phase)
-item_idx += 1
-
-format_str += " {{{0}}}".format(item_idx)
+format_str += "{{{0}}}".format(item_idx)
 format_args.append(spec.name)
 item_idx += 1

@@ -110,10 +97,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
 format_args.append(spec.version)
 item_idx += 1

-if _is_main_phase(phase) is True or strip_compiler is False:
-format_str += " {{{0}}}".format(item_idx)
-format_args.append(spec.compiler)
-item_idx += 1
+format_str += " {{{0}}}".format(item_idx)
+format_args.append(spec.compiler)
+item_idx += 1

 format_str += " {{{0}}}".format(item_idx)
 format_args.append(osarch)
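With bootstrap phases gone, `get_job_name` no longer takes `phase`/`strip_compiler` arguments and always includes the compiler component. A small sketch of how the simplified function builds the name, restricted to the components visible in these hunks (`spec` and `osarch` are assumed inputs; any component not shown above is omitted):

# Sketch: incremental "{0} {1} ..." format-string construction, as in the
# simplified get_job_name above.
item_idx = 0
format_str, format_args = "", []
for value in (spec.name, spec.version, spec.compiler, osarch):
    sep = "" if item_idx == 0 else " "
    format_str += sep + "{{{0}}}".format(item_idx)
    format_args.append(value)
    item_idx += 1
job_name = format_str.format(*format_args)
# e.g. "zlib 1.2.13 gcc@11 linux-ubuntu20.04-x86_64" (hypothetical values)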
@@ -153,49 +139,33 @@ def _add_dependency(spec_label, dep_label, deps):
 deps[spec_label].add(dep_label)


-def _get_spec_dependencies(
-specs, deps, spec_labels, check_index_only=False, mirrors_to_check=None
-):
-spec_deps_obj = _compute_spec_deps(
-specs, check_index_only=check_index_only, mirrors_to_check=mirrors_to_check
-)
+def _get_spec_dependencies(specs, deps, spec_labels):
+spec_deps_obj = _compute_spec_deps(specs)

 if spec_deps_obj:
 dependencies = spec_deps_obj["dependencies"]
 specs = spec_deps_obj["specs"]

 for entry in specs:
-spec_labels[entry["label"]] = {
-"spec": entry["spec"],
-"needs_rebuild": entry["needs_rebuild"],
-}
+spec_labels[entry["label"]] = entry["spec"]

 for entry in dependencies:
 _add_dependency(entry["spec"], entry["depends"], deps)


-def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
+def stage_spec_jobs(specs):
 """Take a set of release specs and generate a list of "stages", where the
 jobs in any stage are dependent only on jobs in previous stages. This
 allows us to maximize build parallelism within the gitlab-ci framework.

 Arguments:
 specs (Iterable): Specs to build
-check_index_only (bool): Regardless of whether DAG pruning is enabled,
-all configured mirrors are searched to see if binaries for specs
-are up to date on those mirrors. This flag limits that search to
-the binary cache indices on those mirrors to speed the process up,
-even though there is no garantee the index is up to date.
-mirrors_to_checK: Optional mapping giving mirrors to check instead of
-any configured mirrors.

 Returns: A tuple of information objects describing the specs, dependencies
 and stages:

-spec_labels: A dictionary mapping the spec labels which are made of
-(pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
-keys. The root spec is the spec of which this spec is a dependency
-and the spec is the formatted spec string for this spec.
+spec_labels: A dictionary mapping the spec labels (which are formatted
+as pkg-name/hash-prefix) to concrete specs.

 deps: A dictionary where the keys should also have appeared as keys in
 the spec_labels dictionary, and the values are the set of
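The staging logic that `stage_spec_jobs` wraps is untouched by this refactor: stages are computed by repeatedly removing jobs whose dependencies are already satisfied. A self-contained illustration of that idea (names and semantics simplified; this is not the actual implementation):

def make_stages(deps):
    """deps maps each job label to the set of labels it depends on."""
    remaining = {label: set(d) for label, d in deps.items()}
    stages = []
    while remaining:
        # Every job with no unsatisfied dependencies can run in this stage.
        ready = {label for label, d in remaining.items() if not d}
        assert ready, "dependency cycle detected"  # assumes an acyclic graph
        stages.append(sorted(ready))
        remaining = {
            label: d - ready for label, d in remaining.items() if label not in ready
        }
    return stages

# make_stages({"zlib/abcdefg": set(), "cmake/hijklmn": {"zlib/abcdefg"}})
# -> [["zlib/abcdefg"], ["cmake/hijklmn"]]   (hypothetical labels)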
@@ -224,13 +194,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
 deps = {}
 spec_labels = {}

-_get_spec_dependencies(
-specs,
-deps,
-spec_labels,
-check_index_only=check_index_only,
-mirrors_to_check=mirrors_to_check,
-)
+_get_spec_dependencies(specs, deps, spec_labels)

 # Save the original deps, as we need to return them at the end of the
 # function. In the while loop below, the "dependencies" variable is
@@ -256,24 +220,36 @@ def _remove_satisfied_deps(deps, satisfied_list):
 return spec_labels, deps, stages


-def _print_staging_summary(spec_labels, dependencies, stages):
+def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
 if not stages:
 return

-tty.msg(" Staging summary ([x] means a job needs rebuilding):")
+mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check)
+tty.msg("Checked the following mirrors for binaries:")
+for m in mirrors.values():
+tty.msg(" {0}".format(m.fetch_url))
+
+tty.msg("Staging summary ([x] means a job needs rebuilding):")
 for stage_index, stage in enumerate(stages):
 tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))

 for job in sorted(stage):
-s = spec_labels[job]["spec"]
+s = spec_labels[job]
+rebuild = rebuild_decisions[job].rebuild
+reason = rebuild_decisions[job].reason
+reason_msg = " ({0})".format(reason) if reason else ""
 tty.msg(
-" [{1}] {0} -> {2}".format(
-job, "x" if spec_labels[job]["needs_rebuild"] else " ", _get_spec_string(s)
+" [{1}] {0} -> {2}{3}".format(
+job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
 )
 )
+if rebuild_decisions[job].mirrors:
+tty.msg(" found on the following mirrors:")
+for murl in rebuild_decisions[job].mirrors:
+tty.msg(" {0}".format(murl))


-def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
+def _compute_spec_deps(spec_list):
 """
 Computes all the dependencies for the spec(s) and generates a JSON
 object which provides both a list of unique spec names as well as a
@@ -337,12 +313,8 @@ def append_dep(s, d):
 tty.msg("Will not stage external pkg: {0}".format(s))
 continue

-up_to_date_mirrors = bindist.get_mirrors_for_spec(
-spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only
-)
-
 skey = _spec_deps_key(s)
-spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
+spec_labels[skey] = s

 for d in s.dependencies(deptype=all):
 dkey = _spec_deps_key(d)
@@ -352,14 +324,8 @@ def append_dep(s, d):

 append_dep(skey, dkey)

-for spec_label, spec_holder in spec_labels.items():
-specs.append(
-{
-"label": spec_label,
-"spec": spec_holder["spec"],
-"needs_rebuild": spec_holder["needs_rebuild"],
-}
-)
+for spec_label, concrete_spec in spec_labels.items():
+specs.append({"label": spec_label, "spec": concrete_spec})

 deps_json_obj = {"specs": specs, "dependencies": dependencies}

@@ -371,26 +337,17 @@ def _spec_matches(spec, match_string):


 def _format_job_needs(
-phase_name,
-strip_compilers,
-dep_jobs,
-osname,
-build_group,
-prune_dag,
-stage_spec_dict,
-enable_artifacts_buildcache,
+dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
 ):
 needs_list = []
 for dep_job in dep_jobs:
 dep_spec_key = _spec_deps_key(dep_job)
-dep_spec_info = stage_spec_dict[dep_spec_key]
+rebuild = rebuild_decisions[dep_spec_key].rebuild

-if not prune_dag or dep_spec_info["needs_rebuild"]:
+if not prune_dag or rebuild:
 needs_list.append(
 {
-"job": get_job_name(
-phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
-),
+"job": get_job_name(dep_job, dep_job.architecture, build_group),
 "artifacts": enable_artifacts_buildcache,
 }
 )
@@ -490,17 +447,12 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
 return affected_specs


-def _build_jobs(phases, staged_phases):
-for phase in phases:
-phase_name = phase["name"]
-spec_labels, dependencies, stages = staged_phases[phase_name]
-
-for stage_jobs in stages:
-for spec_label in stage_jobs:
-spec_record = spec_labels[spec_label]
-release_spec = spec_record["spec"]
-release_spec_dag_hash = release_spec.dag_hash()
-yield release_spec, release_spec_dag_hash
+def _build_jobs(spec_labels, stages):
+for stage_jobs in stages:
+for spec_label in stage_jobs:
+release_spec = spec_labels[spec_label]
+release_spec_dag_hash = release_spec.dag_hash()
+yield release_spec, release_spec_dag_hash


 def _noop(x):
@@ -519,14 +471,21 @@ def _unpack_script(script_section, op=_noop):
 return script


+class RebuildDecision:
+def __init__(self):
+self.rebuild = True
+self.mirrors = []
+self.reason = ""
+
+
 class SpackCI:
 """Spack CI object used to generate intermediate representation
 used by the CI generator(s).
 """

-def __init__(self, ci_config, phases, staged_phases):
+def __init__(self, ci_config, spec_labels, stages):
 """Given the information from the ci section of the config
-and the job phases setup meta data needed for generating Spack
+and the staged jobs, set up meta data needed for generating Spack
 CI IR.
 """

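The new `RebuildDecision` record replaces the old `{"spec": ..., "needs_rebuild": ...}` dictionaries: one instance per staged spec label, carrying the rebuild decision, the mirrors that already hold the binary, and a human-readable reason for the staging summary. A small usage sketch (the label and URL are hypothetical):

rebuild_decisions = {}

decision = RebuildDecision()  # defaults: rebuild=True, no mirrors, no reason
decision.rebuild = False
decision.reason = "Pruned, found in mirrors"
decision.mirrors = ["https://mirror.example.com"]  # hypothetical mirror URL
rebuild_decisions["zlib/abcdefg"] = decision       # hypothetical spec label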
@@ -541,9 +500,6 @@ def __init__(self, ci_config, phases, staged_phases):
 "enable-artifacts-buildcache": self.ci_config.get(
 "enable-artifacts-buildcache", False
 ),
-"bootstrap": self.ci_config.get(
-"bootstrap", []
-), # This is deprecated and should be removed
 "rebuild-index": self.ci_config.get("rebuild-index", True),
 "broken-specs-url": self.ci_config.get("broken-specs-url", None),
 "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -551,7 +507,7 @@ def __init__(self, ci_config, phases, staged_phases):
 }
 jobs = self.ir["jobs"]

-for spec, dag_hash in _build_jobs(phases, staged_phases):
+for spec, dag_hash in _build_jobs(spec_labels, stages):
 jobs[dag_hash] = self.__init_job(spec)

 for name in self.named_jobs:
@@ -751,11 +707,12 @@ def generate_gitlab_ci_yaml(
 env.concretize()
 env.write()

-yaml_root = ev.config_dict(env.manifest)
+yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

 # Get the joined "ci" config with all of the current scopes resolved
 ci_config = cfg.get("ci")

+config_deprecated = False
 if not ci_config:
 tty.warn("Environment does not have `ci` a configuration")
 gitlabci_config = yaml_root.get("gitlab-ci")
@@ -768,6 +725,7 @@ def generate_gitlab_ci_yaml(
 )
 translate_deprecated_config(gitlabci_config)
 ci_config = gitlabci_config
+config_deprecated = True

 # Default target is gitlab...and only target is gitlab
 if not ci_config.get("target", "gitlab") == "gitlab":
@@ -831,6 +789,14 @@ def generate_gitlab_ci_yaml(
 # Values: "spack_pull_request", "spack_protected_branch", or not set
 spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

+copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
+if copy_only_pipeline and config_deprecated:
+tty.warn(
+"SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
+"deprecated ci configuration, a no-op pipeline will be generated\n",
+"instead.",
+)
+
 if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
 tty.die("spack ci generate requires an env containing a mirror")

@@ -863,25 +829,6 @@ def generate_gitlab_ci_yaml(
 if "temporary-storage-url-prefix" in ci_config:
 temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

-bootstrap_specs = []
-phases = []
-if "bootstrap" in ci_config:
-for phase in ci_config["bootstrap"]:
-try:
-phase_name = phase.get("name")
-strip_compilers = phase.get("compiler-agnostic")
-except AttributeError:
-phase_name = phase
-strip_compilers = False
-phases.append({"name": phase_name, "strip-compilers": strip_compilers})
-
-for bs in env.spec_lists[phase_name]:
-bootstrap_specs.append(
-{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
-)
-
-phases.append({"name": "specs", "strip-compilers": False})
-
 # If a remote mirror override (alternate buildcache destination) was
 # specified, add it here in case it has already built hashes we might
 # generate.
@@ -936,7 +883,7 @@ def generate_gitlab_ci_yaml(
 # Add config scopes to environment
 env_includes = env_yaml_root["spack"].get("include", [])
 cli_scopes = [
-os.path.abspath(s.path)
+os.path.relpath(s.path, concrete_env_dir)
 for s in cfg.scopes().values()
 if type(s) == cfg.ImmutableConfigScope
 and s.path not in env_includes
@@ -983,39 +930,13 @@ def generate_gitlab_ci_yaml(
 except bindist.FetchCacheError as e:
 tty.warn(e)

-staged_phases = {}
-try:
-for phase in phases:
-phase_name = phase["name"]
-if phase_name == "specs":
-# Anything in the "specs" of the environment are already
-# concretized by the block at the top of this method, so we
-# only need to find the concrete versions, and then avoid
-# re-concretizing them needlessly later on.
-concrete_phase_specs = [
-concrete
-for abstract, concrete in env.concretized_specs()
-if abstract in env.spec_lists[phase_name]
-]
-else:
-# Any specs lists in other definitions (but not in the
-# "specs") of the environment are not yet concretized so we
-# have to concretize them explicitly here.
-concrete_phase_specs = env.spec_lists[phase_name]
-with spack.concretize.disable_compiler_existence_check():
-for phase_spec in concrete_phase_specs:
-phase_spec.concretize()
-staged_phases[phase_name] = stage_spec_jobs(
-concrete_phase_specs,
-check_index_only=check_index_only,
-mirrors_to_check=mirrors_to_check,
-)
-finally:
-# Clean up remote mirror override if enabled
-if remote_mirror_override:
-spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
-if spack_pipeline_type == "spack_pull_request":
-spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
+spec_labels, dependencies, stages = stage_spec_jobs(
+[
+concrete
+for abstract, concrete in env.concretized_specs()
+if abstract in env.spec_lists["specs"]
+]
+)

 all_job_names = []
 output_object = {}
@@ -1038,276 +959,212 @@ def generate_gitlab_ci_yaml(
 else:
 broken_spec_urls = web_util.list_url(broken_specs_url)

-spack_ci = SpackCI(ci_config, phases, staged_phases)
+spack_ci = SpackCI(ci_config, spec_labels, stages)
 spack_ci_ir = spack_ci.generate_ir()

-for phase in phases:
-phase_name = phase["name"]
-strip_compilers = phase["strip-compilers"]
-
-spec_labels, dependencies, stages = staged_phases[phase_name]
-
-for stage_jobs in stages:
-stage_name = "stage-{0}".format(stage_id)
-stage_names.append(stage_name)
-stage_id += 1
+rebuild_decisions = {}
+
+for stage_jobs in stages:
+stage_name = "stage-{0}".format(stage_id)
+stage_names.append(stage_name)
+stage_id += 1
+
+for spec_label in stage_jobs:
+release_spec = spec_labels[spec_label]
+release_spec_dag_hash = release_spec.dag_hash()

-for spec_label in stage_jobs:
-spec_record = spec_labels[spec_label]
-release_spec = spec_record["spec"]
-release_spec_dag_hash = release_spec.dag_hash()
+spec_record = RebuildDecision()
+rebuild_decisions[spec_label] = spec_record

 if prune_untouched_packages:
 if release_spec not in affected_specs:
-tty.debug(
-"Pruning {0}/{1}, untouched by change.".format(
-release_spec.name, release_spec.dag_hash()[:7]
-)
-)
-spec_record["needs_rebuild"] = False
-continue
-
-job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
-
-if not job_object:
-tty.warn("No match found for {0}, skipping it".format(release_spec))
+spec_record.rebuild = False
+spec_record.reason = "Pruned, untouched by change."
 continue

-if spack_pipeline_type is not None:
-# For spack pipelines "public" and "protected" are reserved tags
-job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
-if spack_pipeline_type == "spack_protected_branch":
-job_object["tags"].extend(["protected"])
-elif spack_pipeline_type == "spack_pull_request":
-job_object["tags"].extend(["public"])
+up_to_date_mirrors = bindist.get_mirrors_for_spec(
+spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
+)

-if "script" not in job_object:
-raise AttributeError
+spec_record.rebuild = not up_to_date_mirrors
+if up_to_date_mirrors:
+spec_record.reason = "Pruned, found in mirrors"
+spec_record.mirrors = [m["mirror_url"] for m in up_to_date_mirrors]
+else:
+spec_record.reason = "Scheduled, not found anywhere"

-def main_script_replacements(cmd):
-return cmd.replace("{env_dir}", concrete_env_dir)
+job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

-job_object["script"] = _unpack_script(
-job_object["script"], op=main_script_replacements
-)
+if not job_object:
+tty.warn("No match found for {0}, skipping it".format(release_spec))
+continue

-if "before_script" in job_object:
-job_object["before_script"] = _unpack_script(job_object["before_script"])
+if spack_pipeline_type is not None:
+# For spack pipelines "public" and "protected" are reserved tags
+job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
+if spack_pipeline_type == "spack_protected_branch":
+job_object["tags"].extend(["protected"])
+elif spack_pipeline_type == "spack_pull_request":
+job_object["tags"].extend(["public"])

-if "after_script" in job_object:
-job_object["after_script"] = _unpack_script(job_object["after_script"])
+if "script" not in job_object:
+raise AttributeError

-osname = str(release_spec.architecture)
-job_name = get_job_name(
-phase_name, strip_compilers, release_spec, osname, build_group
-)
+def main_script_replacements(cmd):
+return cmd.replace("{env_dir}", rel_concrete_env_dir)

-compiler_action = "NONE"
-if len(phases) > 1:
-compiler_action = "FIND_ANY"
-if _is_main_phase(phase_name):
-compiler_action = "INSTALL_MISSING"
+job_object["script"] = _unpack_script(
+job_object["script"], op=main_script_replacements
+)

-job_vars = job_object.setdefault("variables", {})
-job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
-job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
-job_vars["SPACK_COMPILER_ACTION"] = compiler_action
+if "before_script" in job_object:
+job_object["before_script"] = _unpack_script(job_object["before_script"])

-job_object["needs"] = []
-if spec_label in dependencies:
-if enable_artifacts_buildcache:
-# Get dependencies transitively, so they're all
-# available in the artifacts buildcache.
-dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
-else:
-# In this case, "needs" is only used for scheduling
-# purposes, so we only get the direct dependencies.
-dep_jobs = []
-for dep_label in dependencies[spec_label]:
-dep_jobs.append(spec_labels[dep_label]["spec"])
+if "after_script" in job_object:
+job_object["after_script"] = _unpack_script(job_object["after_script"])

-job_object["needs"].extend(
-_format_job_needs(
-phase_name,
-strip_compilers,
-dep_jobs,
-osname,
-build_group,
-prune_dag,
-spec_labels,
-enable_artifacts_buildcache,
-)
-)
+osname = str(release_spec.architecture)
+job_name = get_job_name(release_spec, osname, build_group)

-rebuild_spec = spec_record["needs_rebuild"]
-
-# This next section helps gitlab make sure the right
-# bootstrapped compiler exists in the artifacts buildcache by
-# creating an artificial dependency between this spec and its
-# compiler. So, if we are in the main phase, and if the
-# compiler we are supposed to use is listed in any of the
-# bootstrap spec lists, then we will add more dependencies to
-# the job (that compiler and maybe it's dependencies as well).
-if _is_main_phase(phase_name):
-spec_arch_family = release_spec.architecture.target.microarchitecture.family
-compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
-for bs in bootstrap_specs:
-c_spec = bs["spec"]
-bs_arch = c_spec.architecture
-bs_arch_family = bs_arch.target.microarchitecture.family
-if (
-c_spec.intersects(compiler_pkg_spec)
-and bs_arch_family == spec_arch_family
-):
-# We found the bootstrap compiler this release spec
-# should be built with, so for DAG scheduling
-# purposes, we will at least add the compiler spec
-# to the jobs "needs". But if artifact buildcache
-# is enabled, we'll have to add all transtive deps
-# of the compiler as well.
-
-# Here we check whether the bootstrapped compiler
-# needs to be rebuilt. Until compilers are proper
-# dependencies, we artificially force the spec to
-# be rebuilt if the compiler targeted to build it
-# needs to be rebuilt.
-bs_specs, _, _ = staged_phases[bs["phase-name"]]
-c_spec_key = _spec_deps_key(c_spec)
-rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
-rebuild_spec = rebuild_spec or rbld_comp
-# Also update record so dependents do not fail to
-# add this spec to their "needs"
-spec_record["needs_rebuild"] = rebuild_spec
-
-dep_jobs = [c_spec]
-if enable_artifacts_buildcache:
-dep_jobs = [d for d in c_spec.traverse(deptype=all)]
-
-job_object["needs"].extend(
-_format_job_needs(
-bs["phase-name"],
-bs["strip-compilers"],
-dep_jobs,
-str(bs_arch),
-build_group,
-prune_dag,
-bs_specs,
-enable_artifacts_buildcache,
-)
-)
-else:
-debug_msg = "".join(
-[
-"Considered compiler {0} for spec ",
-"{1}, but rejected it either because it was ",
-"not the compiler required by the spec, or ",
-"because the target arch families of the ",
-"spec and the compiler did not match",
-]
-).format(c_spec, release_spec)
-tty.debug(debug_msg)
-
-if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
-tty.debug(
-"Pruning {0}/{1}, does not need rebuild.".format(
-release_spec.name, release_spec.dag_hash()
-)
-)
-continue
-
-if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
-known_broken_specs_encountered.append(release_spec_dag_hash)
-
-# Only keep track of these if we are copying rebuilt cache entries
-if spack_buildcache_copy:
-# TODO: This assumes signed version of the spec
-buildcache_copies[release_spec_dag_hash] = [
-{
-"src": url_util.join(
-buildcache_copy_src_prefix,
-bindist.build_cache_relative_path(),
-bindist.tarball_name(release_spec, ".spec.json.sig"),
-),
-"dest": url_util.join(
-buildcache_copy_dest_prefix,
-bindist.build_cache_relative_path(),
-bindist.tarball_name(release_spec, ".spec.json.sig"),
-),
-},
-{
-"src": url_util.join(
-buildcache_copy_src_prefix,
-bindist.build_cache_relative_path(),
-bindist.tarball_path_name(release_spec, ".spack"),
-),
-"dest": url_util.join(
-buildcache_copy_dest_prefix,
-bindist.build_cache_relative_path(),
-bindist.tarball_path_name(release_spec, ".spack"),
-),
-},
-]
-
-if artifacts_root:
-job_object["needs"].append(
-{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
-)
-
-job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
-
-if cdash_handler:
-cdash_handler.current_spec = release_spec
-build_name = cdash_handler.build_name
-all_job_names.append(build_name)
-job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
-
-build_stamp = cdash_handler.build_stamp
-job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
-
-job_object["artifacts"] = spack.config.merge_yaml(
-job_object.get("artifacts", {}),
-{
-"when": "always",
-"paths": [
-rel_job_log_dir,
-rel_job_repro_dir,
-rel_job_test_dir,
-rel_user_artifacts_dir,
-],
-},
-)
-
+job_vars = job_object.setdefault("variables", {})
+job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
+job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
+
+job_object["needs"] = []
+if spec_label in dependencies:
 if enable_artifacts_buildcache:
-bc_root = os.path.join(local_mirror_dir, "build_cache")
-job_object["artifacts"]["paths"].extend(
-[
-os.path.join(bc_root, p)
-for p in [
-bindist.tarball_name(release_spec, ".spec.json"),
-bindist.tarball_directory_name(release_spec),
-]
-]
+# Get dependencies transitively, so they're all
+# available in the artifacts buildcache.
+dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
+else:
+# In this case, "needs" is only used for scheduling
+# purposes, so we only get the direct dependencies.
+dep_jobs = []
+for dep_label in dependencies[spec_label]:
+dep_jobs.append(spec_labels[dep_label])

+job_object["needs"].extend(
+_format_job_needs(
+dep_jobs,
+osname,
+build_group,
+prune_dag,
+rebuild_decisions,
+enable_artifacts_buildcache,
 )
+)

-job_object["stage"] = stage_name
-job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
-job_object["interruptible"] = True
+rebuild_spec = spec_record.rebuild

-length_needs = len(job_object["needs"])
-if length_needs > max_length_needs:
-max_length_needs = length_needs
-max_needs_job = job_name
+if not rebuild_spec and not copy_only_pipeline:
+if prune_dag:
+spec_record.reason = "Pruned, up-to-date"
+continue
+else:
+# DAG pruning is disabled, force the spec to rebuild. The
+# record still contains any mirrors on which the spec
+# may have been found, so we can print them in the staging
+# summary.
+spec_record.rebuild = True
+spec_record.reason = "Scheduled, DAG pruning disabled"

-if spack_pipeline_type != "spack_copy_only":
-output_object[job_name] = job_object
-job_id += 1
+if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
+known_broken_specs_encountered.append(release_spec_dag_hash)
+
+# Only keep track of these if we are copying rebuilt cache entries
+if spack_buildcache_copy:
+# TODO: This assumes signed version of the spec
+buildcache_copies[release_spec_dag_hash] = [
+{
+"src": url_util.join(
+buildcache_copy_src_prefix,
+bindist.build_cache_relative_path(),
+bindist.tarball_name(release_spec, ".spec.json.sig"),
+),
+"dest": url_util.join(
+buildcache_copy_dest_prefix,
+bindist.build_cache_relative_path(),
+bindist.tarball_name(release_spec, ".spec.json.sig"),
+),
+},
+{
+"src": url_util.join(
+buildcache_copy_src_prefix,
+bindist.build_cache_relative_path(),
+bindist.tarball_path_name(release_spec, ".spack"),
+),
+"dest": url_util.join(
+buildcache_copy_dest_prefix,
+bindist.build_cache_relative_path(),
+bindist.tarball_path_name(release_spec, ".spack"),
+),
+},
+]
+
+if artifacts_root:
+job_object["needs"].append(
+{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
+)
+
+# Let downstream jobs know whether the spec needed rebuilding, regardless
+# whether DAG pruning was enabled or not.
+job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
+
+if cdash_handler:
+cdash_handler.current_spec = release_spec
+build_name = cdash_handler.build_name
+all_job_names.append(build_name)
+job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
+
+build_stamp = cdash_handler.build_stamp
+job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
+
+job_object["artifacts"] = spack.config.merge_yaml(
+job_object.get("artifacts", {}),
+{
+"when": "always",
+"paths": [
+rel_job_log_dir,
+rel_job_repro_dir,
+rel_job_test_dir,
+rel_user_artifacts_dir,
+],
+},
+)
+
+if enable_artifacts_buildcache:
+bc_root = os.path.join(local_mirror_dir, "build_cache")
+job_object["artifacts"]["paths"].extend(
+[
+os.path.join(bc_root, p)
+for p in [
+bindist.tarball_name(release_spec, ".spec.json"),
+bindist.tarball_directory_name(release_spec),
+]
+]
+)
+
+job_object["stage"] = stage_name
+job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
+job_object["interruptible"] = True
+
+length_needs = len(job_object["needs"])
+if length_needs > max_length_needs:
+max_length_needs = length_needs
+max_needs_job = job_name
+
+if not copy_only_pipeline:
+output_object[job_name] = job_object
+job_id += 1

 if print_summary:
-for phase in phases:
-phase_name = phase["name"]
-tty.msg('Stages for phase "{0}"'.format(phase_name))
-phase_stages = staged_phases[phase_name]
-_print_staging_summary(*phase_stages)
+_print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
+
+# Clean up remote mirror override if enabled
+if remote_mirror_override:
+spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
+if spack_pipeline_type == "spack_pull_request":
+spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

 tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))

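The heart of the rewritten generation loop is this per-spec rebuild decision, which replaces the deleted bootstrap-compiler bookkeeping. Its skeleton, distilled from the hunk above (reason strings and calls as shown there; the surrounding loop variables are assumed):

# Distilled decision flow for one release_spec in the loop above.
spec_record = RebuildDecision()  # rebuild defaults to True

if prune_untouched_packages and release_spec not in affected_specs:
    spec_record.rebuild = False
    spec_record.reason = "Pruned, untouched by change."
else:
    up_to_date_mirrors = bindist.get_mirrors_for_spec(
        spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
    )
    spec_record.rebuild = not up_to_date_mirrors
    if up_to_date_mirrors:
        spec_record.reason = "Pruned, found in mirrors"
        spec_record.mirrors = [m["mirror_url"] for m in up_to_date_mirrors]
    else:
        spec_record.reason = "Scheduled, not found anywhere"

# Later in the loop: the job is skipped only when prune_dag is on and this is
# not a copy-only pipeline; with pruning off, the spec is forced back to
# rebuild=True but keeps the list of mirrors for the staging summary.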
@@ -1330,7 +1187,7 @@ def main_script_replacements(cmd):
 "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
 }

-if spack_pipeline_type == "spack_copy_only":
+if copy_only_pipeline and not config_deprecated:
 stage_names.append("copy")
 sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
 sync_job["stage"] = "copy"
@@ -1474,12 +1331,18 @@ def main_script_replacements(cmd):
 sorted_output = cinw.needs_to_dependencies(sorted_output)
 else:
 # No jobs were generated
-tty.debug("No specs to rebuild, generating no-op job")
 noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]

 noop_job["retry"] = service_job_retries

-sorted_output = {"no-specs-to-rebuild": noop_job}
+if copy_only_pipeline and config_deprecated:
+tty.debug("Generating no-op job as copy-only is unsupported here.")
+noop_job["script"] = [
+'echo "copy-only pipelines are not supported with deprecated ci configs"'
+]
+sorted_output = {"unsupported-copy": noop_job}
+else:
+tty.debug("No specs to rebuild, generating no-op job")
+sorted_output = {"no-specs-to-rebuild": noop_job}

 if known_broken_specs_encountered:
 tty.error("This pipeline generated hashes known to be broken on develop:")
@@ -1560,44 +1423,6 @@ def can_verify_binaries():
 return len(gpg_util.public_keys()) >= 1


-def configure_compilers(compiler_action, scope=None):
-"""Depending on the compiler_action parameter, either turn on the
-install_missing_compilers config option, or find spack compilers,
-or do nothing. This is used from rebuild jobs in bootstrapping
-pipelines, where in the bootsrapping phase we would pass
-FIND_ANY in case of compiler-agnostic bootstrapping, while in the
-spec building phase we would pass INSTALL_MISSING in order to get
-spack to use the compiler which was built in the previous phase and
-is now sitting in the binary mirror.
-
-Arguments:
-compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
-described above. Any other value essentially results in a no-op.
-scope (spack.config.ConfigScope): Optional. The scope in which to look for
-compilers, in case 'FIND_ANY' was provided.
-"""
-if compiler_action == "INSTALL_MISSING":
-tty.debug("Make sure bootstrapped compiler will be installed")
-config = cfg.get("config")
-config["install_missing_compilers"] = True
-cfg.set("config", config)
-elif compiler_action == "FIND_ANY":
-tty.debug("Just find any available compiler")
-find_args = ["find"]
-if scope:
-find_args.extend(["--scope", scope])
-output = spack_compiler(*find_args)
-tty.debug("spack compiler find")
-tty.debug(output)
-output = spack_compiler("list")
-tty.debug("spack compiler list")
-tty.debug(output)
-else:
-tty.debug("No compiler action to be taken")
-
-return None
-
-
 def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
 """Unchecked version of the public API, for easier mocking"""
 unsigned = not sign_binaries
@@ -2303,7 +2128,7 @@ def run_standalone_tests(**kwargs):
 tty.debug("spack test exited {0}".format(exit_code))


-class CDashHandler(object):
+class CDashHandler:
 """
 Class for managing CDash data and processing.
 """
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import os
 import re
@@ -149,7 +147,7 @@ def get_command(cmd_name):
 return getattr(get_module(cmd_name), pname)


-class _UnquotedFlags(object):
+class _UnquotedFlags:
 """Use a heuristic in `.extract()` to detect whether the user is trying to set
 multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

@@ -347,7 +345,7 @@ def iter_groups(specs, indent, all_headers):
 spack.spec.architecture_color,
 architecture if architecture else "no arch",
 spack.spec.compiler_color,
-f"{compiler.name}@{compiler.version}" if compiler else "no compiler",
+f"{compiler.display_str}" if compiler else "no compiler",
 )

 # Sometimes we want to display specs that are not yet concretized.
@@ -547,7 +545,7 @@ class PythonNameError(spack.error.SpackError):

 def __init__(self, name):
 self.name = name
-super(PythonNameError, self).__init__("{0} is not a permissible Python name.".format(name))
+super().__init__("{0} is not a permissible Python name.".format(name))


 class CommandNameError(spack.error.SpackError):
@@ -555,9 +553,7 @@ class CommandNameError(spack.error.SpackError):

 def __init__(self, name):
 self.name = name
-super(CommandNameError, self).__init__(
-"{0} is not a permissible Spack command name.".format(name)
-)
+super().__init__("{0} is not a permissible Spack command name.".format(name))


 ########################################
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import collections

 import archspec.cpu
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import os.path
 import shutil
 import tempfile
@@ -43,13 +43,6 @@ def setup_parser(subparser):
 subparsers = subparser.add_subparsers(help="buildcache sub-commands")

 push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
-# TODO: remove from Spack 0.21
-push.add_argument(
-"-r",
-"--rel",
-action="store_true",
-help="make all rpaths relative before creating tarballs. (deprecated)",
-)
 push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
 push.add_argument(
 "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
@@ -63,37 +56,7 @@ def setup_parser(subparser):
 push.add_argument(
 "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
 )
-output = push.add_mutually_exclusive_group(required=False)
-# TODO: remove from Spack 0.21
-output.add_argument(
-"-d",
-"--directory",
-metavar="directory",
-dest="mirror_flag",
-type=arguments.mirror_directory,
-help="local directory where buildcaches will be written. (deprecated)",
-)
-# TODO: remove from Spack 0.21
-output.add_argument(
-"-m",
-"--mirror-name",
-metavar="mirror-name",
-dest="mirror_flag",
-type=arguments.mirror_name,
-help="name of the mirror where buildcaches will be written. (deprecated)",
-)
-# TODO: remove from Spack 0.21
-output.add_argument(
-"--mirror-url",
-metavar="mirror-url",
-dest="mirror_flag",
-type=arguments.mirror_url,
-help="URL of the mirror where buildcaches will be written. (deprecated)",
-)
-# Unfortunately we cannot add this to the mutually exclusive group above,
-# because we have further positional arguments.
-# TODO: require from Spack 0.21
-push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
+push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
 push.add_argument(
 "--update-index",
 "--rebuild-index",
@@ -127,13 +90,6 @@ def setup_parser(subparser):
 install.add_argument(
 "-m", "--multiple", action="store_true", help="allow all matching packages "
 )
-# TODO: remove from Spack 0.21
-install.add_argument(
-"-a",
-"--allow-root",
-action="store_true",
-help="allow install root string in binary files after RPATH substitution. (deprecated)",
-)
 install.add_argument(
 "-u",
 "--unsigned",
@@ -268,75 +224,21 @@ def setup_parser(subparser):
 # Sync buildcache entries from one mirror to another
 sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
 sync.add_argument(
-"--manifest-glob",
-default=None,
-help="A quoted glob pattern identifying copy manifest files",
+"--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
 )
-source = sync.add_mutually_exclusive_group(required=False)
-# TODO: remove in Spack 0.21
-source.add_argument(
-"--src-directory",
-metavar="DIRECTORY",
-dest="src_mirror_flag",
-type=arguments.mirror_directory,
-help="Source mirror as a local file path (deprecated)",
-)
-# TODO: remove in Spack 0.21
-source.add_argument(
-"--src-mirror-name",
-metavar="MIRROR_NAME",
-dest="src_mirror_flag",
-type=arguments.mirror_name,
-help="Name of the source mirror (deprecated)",
-)
-# TODO: remove in Spack 0.21
-source.add_argument(
-"--src-mirror-url",
-metavar="MIRROR_URL",
-dest="src_mirror_flag",
-type=arguments.mirror_url,
-help="URL of the source mirror (deprecated)",
-)
-# TODO: only support this in 0.21
-source.add_argument(
+sync.add_argument(
 "src_mirror",
 metavar="source mirror",
 type=arguments.mirror_name_or_url,
-help="Source mirror name, path, or URL",
 nargs="?",
+help="Source mirror name, path, or URL",
 )
-dest = sync.add_mutually_exclusive_group(required=False)
-# TODO: remove in Spack 0.21
-dest.add_argument(
-"--dest-directory",
-metavar="DIRECTORY",
-dest="dest_mirror_flag",
-type=arguments.mirror_directory,
-help="Destination mirror as a local file path (deprecated)",
-)
-# TODO: remove in Spack 0.21
-dest.add_argument(
-"--dest-mirror-name",
-metavar="MIRROR_NAME",
-type=arguments.mirror_name,
-dest="dest_mirror_flag",
-help="Name of the destination mirror (deprecated)",
-)
-# TODO: remove in Spack 0.21
-dest.add_argument(
-"--dest-mirror-url",
-metavar="MIRROR_URL",
-dest="dest_mirror_flag",
-type=arguments.mirror_url,
-help="URL of the destination mirror (deprecated)",
-)
-# TODO: only support this in 0.21
-dest.add_argument(
+sync.add_argument(
 "dest_mirror",
 metavar="destination mirror",
 type=arguments.mirror_name_or_url,
||||||
help="Destination mirror name, path, or URL",
|
|
||||||
nargs="?",
|
nargs="?",
|
||||||
|
help="Destination mirror name, path, or URL",
|
||||||
)
|
)
|
||||||
sync.set_defaults(func=sync_fn)
|
sync.set_defaults(func=sync_fn)
|
||||||
|
|
||||||
@@ -344,39 +246,8 @@ def setup_parser(subparser):
|
|||||||
update_index = subparsers.add_parser(
|
update_index = subparsers.add_parser(
|
||||||
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
|
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
|
||||||
)
|
)
|
||||||
update_index_out = update_index.add_mutually_exclusive_group(required=True)
|
update_index.add_argument(
|
||||||
# TODO: remove in Spack 0.21
|
"mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
|
||||||
update_index_out.add_argument(
|
|
||||||
"-d",
|
|
||||||
"--directory",
|
|
||||||
metavar="directory",
|
|
||||||
dest="mirror_flag",
|
|
||||||
type=arguments.mirror_directory,
|
|
||||||
help="local directory where buildcaches will be written (deprecated)",
|
|
||||||
)
|
|
||||||
# TODO: remove in Spack 0.21
|
|
||||||
update_index_out.add_argument(
|
|
||||||
"-m",
|
|
||||||
"--mirror-name",
|
|
||||||
metavar="mirror-name",
|
|
||||||
dest="mirror_flag",
|
|
||||||
type=arguments.mirror_name,
|
|
||||||
help="name of the mirror where buildcaches will be written (deprecated)",
|
|
||||||
)
|
|
||||||
# TODO: remove in Spack 0.21
|
|
||||||
update_index_out.add_argument(
|
|
||||||
"--mirror-url",
|
|
||||||
metavar="mirror-url",
|
|
||||||
dest="mirror_flag",
|
|
||||||
type=arguments.mirror_url,
|
|
||||||
help="URL of the mirror where buildcaches will be written (deprecated)",
|
|
||||||
)
|
|
||||||
# TODO: require from Spack 0.21
|
|
||||||
update_index_out.add_argument(
|
|
||||||
"mirror",
|
|
||||||
type=arguments.mirror_name_or_url,
|
|
||||||
help="Destination mirror name, path, or URL",
|
|
||||||
nargs="?",
|
|
||||||
)
|
)
|
||||||
update_index.add_argument(
|
update_index.add_argument(
|
||||||
"-k",
|
"-k",
|
||||||
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):
|
|||||||
|
|
||||||
def push_fn(args):
|
def push_fn(args):
|
||||||
"""create a binary package and push it to a mirror"""
|
"""create a binary package and push it to a mirror"""
|
||||||
if args.mirror_flag:
|
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||||
mirror = args.mirror_flag
|
|
||||||
elif not args.mirror:
|
|
||||||
raise ValueError("No mirror provided")
|
|
||||||
else:
|
|
||||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
|
||||||
|
|
||||||
if args.mirror_flag:
|
|
||||||
tty.warn(
|
|
||||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
|
||||||
"Spack 0.21, use positional arguments instead."
|
|
||||||
)
|
|
||||||
|
|
||||||
if args.rel:
|
|
||||||
tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
|
|
||||||
|
|
||||||
# TODO: remove this in 0.21. If we have mirror_flag, the first
|
|
||||||
# spec is in the positional mirror arg due to argparse limitations.
|
|
||||||
input_specs = args.specs
|
|
||||||
if args.mirror_flag and args.mirror:
|
|
||||||
input_specs.insert(0, args.mirror)
|
|
||||||
|
|
||||||
url = mirror.push_url
|
url = mirror.push_url
|
||||||
|
|
||||||
specs = bindist.specs_to_be_packaged(
|
specs = bindist.specs_to_be_packaged(
|
||||||
_matching_specs(input_specs, args.spec_file),
|
_matching_specs(args.specs, args.spec_file),
|
||||||
root="package" in args.things_to_install,
|
root="package" in args.things_to_install,
|
||||||
dependencies="dependencies" in args.things_to_install,
|
dependencies="dependencies" in args.things_to_install,
|
||||||
)
|
)
|
||||||
@@ -486,7 +337,6 @@ def push_fn(args):
|
|||||||
url,
|
url,
|
||||||
bindist.PushOptions(
|
bindist.PushOptions(
|
||||||
force=args.force,
|
force=args.force,
|
||||||
relative=args.rel,
|
|
||||||
unsigned=args.unsigned,
|
unsigned=args.unsigned,
|
||||||
allow_root=args.allow_root,
|
allow_root=args.allow_root,
|
||||||
key=args.key,
|
key=args.key,
|
||||||
@@ -524,9 +374,6 @@ def install_fn(args):
|
|||||||
if not args.specs:
|
if not args.specs:
|
||||||
tty.die("a spec argument is required to install from a buildcache")
|
tty.die("a spec argument is required to install from a buildcache")
|
||||||
|
|
||||||
if args.allow_root:
|
|
||||||
tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")
|
|
||||||
|
|
||||||
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
|
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
|
||||||
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
|
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
|
||||||
for match in matches:
|
for match in matches:
|
||||||
@@ -710,21 +557,11 @@ def sync_fn(args):
|
|||||||
manifest_copy(glob.glob(args.manifest_glob))
|
manifest_copy(glob.glob(args.manifest_glob))
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
# If no manifest_glob, require a source and dest mirror.
|
if args.src_mirror is None or args.dest_mirror is None:
|
||||||
# TODO: Simplify in Spack 0.21
|
tty.die("Provide mirrors to sync from and to.")
|
||||||
if not (args.src_mirror_flag or args.src_mirror) or not (
|
|
||||||
args.dest_mirror_flag or args.dest_mirror
|
|
||||||
):
|
|
||||||
raise ValueError("Source and destination mirror are required.")
|
|
||||||
|
|
||||||
if args.src_mirror_flag or args.dest_mirror_flag:
|
src_mirror = args.src_mirror
|
||||||
tty.warn(
|
dest_mirror = args.dest_mirror
|
||||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
|
||||||
"Spack 0.21, use positional arguments instead."
|
|
||||||
)
|
|
||||||
|
|
||||||
src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
|
|
||||||
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
|
|
||||||
|
|
||||||
src_mirror_url = src_mirror.fetch_url
|
src_mirror_url = src_mirror.fetch_url
|
||||||
dest_mirror_url = dest_mirror.push_url
|
dest_mirror_url = dest_mirror.push_url
|
||||||
@@ -803,13 +640,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
|||||||
|
|
||||||
def update_index_fn(args):
|
def update_index_fn(args):
|
||||||
"""Update a buildcache index."""
|
"""Update a buildcache index."""
|
||||||
if args.mirror_flag:
|
update_index(args.mirror, update_keys=args.keys)
|
||||||
tty.warn(
|
|
||||||
"Using flags to specify mirrors is deprecated and will be removed in "
|
|
||||||
"Spack 0.21, use positional arguments instead."
|
|
||||||
)
|
|
||||||
mirror = args.mirror_flag if args.mirror_flag else args.mirror
|
|
||||||
update_index(mirror, update_keys=args.keys)
|
|
||||||
|
|
||||||
|
|
||||||
def buildcache(parser, args):
|
def buildcache(parser, args):
|
||||||
|
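
Taken together, these hunks drop the deprecated `-d`/`-m`/`--mirror-url` flag group and make the mirror a required positional argument for `push`, `sync`, and `update-index`. A minimal, self-contained sketch of the resulting argparse shape — the `mirror_name_or_url` resolver here is a hypothetical stand-in for the helper in `spack.cmd.common.arguments`:

```python
import argparse

def mirror_name_or_url(value: str):
    # Hypothetical resolver: values that look like URLs or paths are used
    # directly, anything else is treated as a named mirror from configuration.
    kind = "url" if "://" in value or "/" in value else "name"
    return kind, value

parser = argparse.ArgumentParser(prog="spack buildcache")
subparsers = parser.add_subparsers(dest="command")
push = subparsers.add_parser("push")
# One required positional replaces the old mutually exclusive -d/-m/--mirror-url flags:
push.add_argument("mirror", type=mirror_name_or_url, help="Mirror name, path, or URL.")
push.add_argument("specs", nargs="*")

args = parser.parse_args(["push", "s3://my-mirror", "zlib"])
print(args.mirror)  # ('url', 's3://my-mirror')
```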
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import sys
 
@@ -228,7 +228,7 @@ def ci_reindex(args):
     Use the active, gitlab-enabled environment to rebuild the buildcache
     index for the associated mirror."""
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
-    yaml_root = ev.config_dict(env.manifest)
+    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
 
     if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
         tty.die("spack ci rebuild-index requires an env containing a mirror")
@@ -274,7 +274,6 @@ def ci_rebuild(args):
     signing_key = os.environ.get("SPACK_SIGNING_KEY")
     job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
     job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
-    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
     remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
     remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
@@ -295,7 +294,6 @@ def ci_rebuild(args):
     tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
     tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
     tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
-    tty.debug("compiler_action = {0}".format(compiler_action))
 
     # Query the environment manifest to find out whether we're reporting to a
     # CDash instance, and if so, gather some information from the manifest to
@@ -411,14 +409,6 @@ def ci_rebuild(args):
     if signing_key:
         spack_ci.import_signing_key(signing_key)
 
-    # Depending on the specifics of this job, we might need to turn on the
-    # "config:install_missing compilers" option (to build this job spec
-    # with a bootstrapped compiler), or possibly run "spack compiler find"
-    # (to build a bootstrap compiler or one of its deps in a
-    # compiler-agnostic way), or maybe do nothing at all (to build a spec
-    # using a compiler already installed on the target system).
-    spack_ci.configure_compilers(compiler_action)
-
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
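
Besides deleting the `SPACK_COMPILER_ACTION` bootstrap-compiler plumbing, the `ci_reindex` hunk now reads the manifest mapping directly through a top-level key instead of the removed `ev.config_dict` helper. A rough sketch of that lookup, under the assumption that `ev.TOP_LEVEL_KEY` names the manifest's root mapping (the literal `"spack"` below is illustrative):

```python
TOP_LEVEL_KEY = "spack"  # assumption: stands in for ev.TOP_LEVEL_KEY
manifest = {"spack": {"specs": ["zlib"], "mirrors": {"local": "file:///mirror"}}}

yaml_root = manifest[TOP_LEVEL_KEY]
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
    raise SystemExit("spack ci rebuild-index requires an env containing a mirror")
print(next(iter(yaml_root["mirrors"].values())))  # file:///mirror
```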
@@ -3,17 +3,22 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import copy
 import os
 import re
 import sys
+from argparse import ArgumentParser, Namespace
+from typing import IO, Any, Callable, Dict, Sequence, Set
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
+from llnl.util.argparsewriter import (
+    ArgparseCompletionWriter,
+    ArgparseRstWriter,
+    ArgparseWriter,
+    Command,
+)
 from llnl.util.tty.colify import colify
 
 import spack.cmd
@@ -27,12 +32,12 @@
 
 
 #: list of command formatters
-formatters = {}
+formatters: Dict[str, Callable[[Namespace, IO], None]] = {}
 
 
 #: standard arguments for updating completion scripts
 #: we iterate through these when called with --update-completion
-update_completion_args = {
+update_completion_args: Dict[str, Dict[str, Any]] = {
     "bash": {
         "aliases": True,
         "format": "bash",
@@ -42,13 +47,25 @@
 }
 
 
-def formatter(func):
-    """Decorator used to register formatters"""
+def formatter(func: Callable[[Namespace, IO], None]) -> Callable[[Namespace, IO], None]:
+    """Decorator used to register formatters.
+
+    Args:
+        func: Formatting function.
+
+    Returns:
+        The same function.
+    """
     formatters[func.__name__] = func
     return func
 
 
-def setup_parser(subparser):
+def setup_parser(subparser: ArgumentParser) -> None:
+    """Set up the argument parser.
+
+    Args:
+        subparser: Preliminary argument parser.
+    """
     subparser.add_argument(
         "--update-completion",
         action="store_true",
@@ -91,18 +108,34 @@ class SpackArgparseRstWriter(ArgparseRstWriter):
 
     def __init__(
         self,
-        prog,
-        out=None,
-        aliases=False,
-        documented_commands=[],
-        rst_levels=["-", "-", "^", "~", ":", "`"],
+        prog: str,
+        out: IO = sys.stdout,
+        aliases: bool = False,
+        documented_commands: Set[str] = set(),
+        rst_levels: Sequence[str] = ["-", "-", "^", "~", ":", "`"],
     ):
-        out = sys.stdout if out is None else out
-        super(SpackArgparseRstWriter, self).__init__(prog, out, aliases, rst_levels)
+        """Initialize a new SpackArgparseRstWriter instance.
+
+        Args:
+            prog: Program name.
+            out: File object to write to.
+            aliases: Whether or not to include subparsers for aliases.
+            documented_commands: Set of commands with additional documentation.
+            rst_levels: List of characters for rst section headings.
+        """
+        super().__init__(prog, out, aliases, rst_levels)
         self.documented = documented_commands
 
-    def usage(self, *args):
-        string = super(SpackArgparseRstWriter, self).usage(*args)
+    def usage(self, usage: str) -> str:
+        """Example usage of a command.
+
+        Args:
+            usage: Command usage.
+
+        Returns:
+            Usage of a command.
+        """
+        string = super().usage(usage)
 
         cmd = self.parser.prog.replace(" ", "-")
         if cmd in self.documented:
@@ -112,11 +145,21 @@ def usage(self, *args):
 
 
 class SubcommandWriter(ArgparseWriter):
-    def format(self, cmd):
+    """Write argparse output as a list of subcommands."""
+
+    def format(self, cmd: Command) -> str:
+        """Return the string representation of a single node in the parser tree.
+
+        Args:
+            cmd: Parsed information about a command or subcommand.
+
+        Returns:
+            String representation of this subcommand.
+        """
        return "    " * self.level + cmd.prog + "\n"
 
 
-_positional_to_subroutine = {
+_positional_to_subroutine: Dict[str, str] = {
     "package": "_all_packages",
     "spec": "_all_packages",
     "filter": "_all_packages",
@@ -138,7 +181,19 @@ def format(self, cmd):
 class BashCompletionWriter(ArgparseCompletionWriter):
     """Write argparse output as bash programmable tab completion."""
 
-    def body(self, positionals, optionals, subcommands):
+    def body(
+        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
+    ) -> str:
+        """Return the body of the function.
+
+        Args:
+            positionals: List of positional arguments.
+            optionals: List of optional arguments.
+            subcommands: List of subcommand parsers.
+
+        Returns:
+            Function body.
+        """
         if positionals:
             return """
 if $list_options
@@ -168,7 +223,15 @@ def body(self, positionals, optionals, subcommands):
             self.optionals(optionals)
         )
 
-    def positionals(self, positionals):
+    def positionals(self, positionals: Sequence[str]) -> str:
+        """Return the syntax for reporting positional arguments.
+
+        Args:
+            positionals: List of positional arguments.
+
+        Returns:
+            Syntax for positional arguments.
+        """
         # If match found, return function name
         for positional in positionals:
             for key, value in _positional_to_subroutine.items():
@@ -178,22 +241,49 @@ def positionals(self, positionals):
         # If no matches found, return empty list
         return 'SPACK_COMPREPLY=""'
 
-    def optionals(self, optionals):
+    def optionals(self, optionals: Sequence[str]) -> str:
+        """Return the syntax for reporting optional flags.
+
+        Args:
+            optionals: List of optional arguments.
+
+        Returns:
+            Syntax for optional flags.
+        """
         return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))
 
-    def subcommands(self, subcommands):
+    def subcommands(self, subcommands: Sequence[str]) -> str:
+        """Return the syntax for reporting subcommands.
+
+        Args:
+            subcommands: List of subcommand parsers.
+
+        Returns:
+            Syntax for subcommand parsers
+        """
         return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
 
 
 @formatter
-def subcommands(args, out):
+def subcommands(args: Namespace, out: IO) -> None:
+    """Hierarchical tree of subcommands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)
     writer = SubcommandWriter(parser.prog, out, args.aliases)
     writer.write(parser)
 
 
-def rst_index(out):
+def rst_index(out: IO) -> None:
+    """Generate an index of all commands.
+
+    Args:
+        out: File object to write to.
+    """
     out.write("\n")
 
     index = spack.main.index_commands()
@@ -221,13 +311,19 @@ def rst_index(out):
 
 
 @formatter
-def rst(args, out):
+def rst(args: Namespace, out: IO) -> None:
+    """ReStructuredText documentation of subcommands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     # create a parser with all commands
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)
 
     # extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
-    documented_commands = set()
+    documented_commands: Set[str] = set()
     for filename in args.rst_files:
         with open(filename) as f:
             for line in f:
@@ -245,7 +341,13 @@ def rst(args, out):
 
 
 @formatter
-def names(args, out):
+def names(args: Namespace, out: IO) -> None:
+    """Simple list of top-level commands.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     commands = copy.copy(spack.cmd.all_commands())
 
     if args.aliases:
@@ -255,7 +357,13 @@ def names(args, out):
 
 
 @formatter
-def bash(args, out):
+def bash(args: Namespace, out: IO) -> None:
+    """Bash tab-completion script.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)
 
@@ -263,7 +371,13 @@ def bash(args, out):
     writer.write(parser)
 
 
-def prepend_header(args, out):
+def prepend_header(args: Namespace, out: IO) -> None:
+    """Prepend header text at the beginning of a file.
+
+    Args:
+        args: Command-line arguments.
+        out: File object to write to.
+    """
     if not args.header:
         return
 
@@ -271,10 +385,14 @@ def prepend_header(args, out):
         out.write(header.read())
 
 
-def _commands(parser, args):
+def _commands(parser: ArgumentParser, args: Namespace) -> None:
     """This is the 'regular' command, which can be called multiple times.
 
     See ``commands()`` below for ``--update-completion`` handling.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
     """
     formatter = formatters[args.format]
 
@@ -296,12 +414,15 @@ def _commands(parser, args):
     formatter(args, sys.stdout)
 
 
-def update_completion(parser, args):
+def update_completion(parser: ArgumentParser, args: Namespace) -> None:
     """Iterate through the shells and update the standard completion files.
 
     This is a convenience method to avoid calling this command many
     times, and to simplify completion update for developers.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
     """
     for shell, shell_args in update_completion_args.items():
         for attr, value in shell_args.items():
@@ -309,14 +430,20 @@ def update_completion(parser, args):
     _commands(parser, args)
 
 
-def commands(parser, args):
+def commands(parser: ArgumentParser, args: Namespace) -> None:
+    """Main function that calls formatter functions.
+
+    Args:
+        parser: Argument parser.
+        args: Command-line arguments.
+    """
     if args.update_completion:
         if args.format != "names" or any([args.aliases, args.update, args.header]):
             tty.die("--update-completion can only be specified alone.")
 
         # this runs the command multiple times with different arguments
-        return update_completion(parser, args)
+        update_completion(parser, args)
 
     else:
         # run commands normally
-        return _commands(parser, args)
+        _commands(parser, args)
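
The typing work above centers on the `formatters` registry: a dict mapping a format name to a callable taking a parsed `Namespace` and an output stream. A compact sketch of the same decorator-registry pattern, with illustrative names rather than Spack's real formatters:

```python
import sys
from argparse import Namespace
from typing import IO, Callable, Dict

formatters: Dict[str, Callable[[Namespace, IO], None]] = {}

def formatter(func: Callable[[Namespace, IO], None]) -> Callable[[Namespace, IO], None]:
    """Register a formatting function under its own name."""
    formatters[func.__name__] = func
    return func

@formatter
def names(args: Namespace, out: IO) -> None:
    out.write("zlib\nhdf5\n")  # placeholder output

# Dispatch by name, the way a --format=<name> option would:
formatters["names"](Namespace(), sys.stdout)
```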
@@ -36,7 +36,10 @@ def shell_init_instructions(cmd, equivalent):
         "  source %s/setup-env.fish" % spack.paths.share_path,
         "",
         color.colorize("@*c{For Windows batch:}"),
-        "  source %s/spack_cmd.bat" % spack.paths.share_path,
+        "  %s\\spack_cmd.bat" % spack.paths.bin_path,
+        "",
+        color.colorize("@*c{For PowerShell:}"),
+        "  %s\\setup-env.ps1" % spack.paths.share_path,
         "",
         "Or, if you do not want to use shell support, run "
         + ("one of these" if shell_specific else "this")
@@ -50,6 +53,7 @@ def shell_init_instructions(cmd, equivalent):
             equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
             equivalent.format(sh_arg="--fish") + " # fish",
             equivalent.format(sh_arg="--bat ") + " # batch",
+            equivalent.format(sh_arg="--pwsh") + " # powershell",
         ]
     else:
         msg += ["  " + equivalent]
@@ -349,7 +349,7 @@ def install_status():
         "-I",
         "--install-status",
         action="store_true",
-        default=False,
+        default=True,
         help="show install status of packages. packages can be: "
         "installed [+], missing and needed by an installed package [-], "
         "installed in and upstream instance [^], "
@@ -357,6 +357,17 @@ def install_status():
     )
 
 
+@arg
+def no_install_status():
+    return Args(
+        "--no-install-status",
+        dest="install_status",
+        action="store_false",
+        default=True,
+        help="do not show install status annotations",
+    )
+
+
 @arg
 def no_checksum():
     return Args(
@@ -468,7 +479,7 @@ def __init__(
         # substituting '_' for ':'.
         dest = dest.replace(":", "_")
 
-        super(ConfigSetAction, self).__init__(
+        super().__init__(
             option_strings=option_strings,
             dest=dest,
             nargs=0,
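
The new `no_install_status` argument is the standard argparse idiom for a paired on/off switch: both flags share one `dest`, the default flips to `True`, and the `--no-...` form stores `False`. A minimal sketch:

```python
import argparse

parser = argparse.ArgumentParser()
# "On" switch, now enabled by default:
parser.add_argument("-I", "--install-status", action="store_true", default=True,
                    help="show install status of packages")
# "Off" switch writes False into the same destination:
parser.add_argument("--no-install-status", dest="install_status", action="store_false",
                    default=True, help="do not show install status annotations")

print(parser.parse_args([]).install_status)                       # True
print(parser.parse_args(["--no-install-status"]).install_status)  # False
```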
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import argparse
 import os
 
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import sys
 
@@ -53,7 +51,7 @@ def setup_parser(subparser):
         "--scope",
         choices=scopes,
         metavar=scopes_metavar,
-        default=spack.config.default_modify_scope("compilers"),
+        default=None,
         help="configuration scope to modify",
     )
 
@@ -98,7 +96,7 @@ def compiler_find(args):
         config = spack.config.config
         filename = config.get_config_filename(args.scope, "compilers")
         tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
-        colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
+        colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
     else:
         tty.msg("Found no new compilers")
     tty.msg("Compilers are defined in the following files:")
@@ -106,19 +104,21 @@ def compiler_find(args):
 
 
 def compiler_remove(args):
-    cspec = spack.spec.CompilerSpec(args.compiler_spec)
-    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-    if not compilers:
-        tty.die("No compilers match spec %s" % cspec)
-    elif not args.all and len(compilers) > 1:
-        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
-        colify(reversed(sorted([c.spec for c in compilers])), indent=4)
+    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
+    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
+
+    if not candidate_compilers:
+        tty.die("No compilers match spec %s" % compiler_spec)
+
+    if not args.all and len(candidate_compilers) > 1:
+        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
+        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
         tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
         sys.exit(1)
 
-    for compiler in compilers:
-        spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s" % compiler.spec)
+    for current_compiler in candidate_compilers:
+        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
+        tty.msg(f"{current_compiler.spec.display_str} has been removed")
 
 
 def compiler_info(args):
@@ -130,7 +130,7 @@ def compiler_info(args):
         tty.die("No compilers match spec %s" % cspec)
     else:
         for c in compilers:
-            print(str(c.spec) + ":")
+            print(c.spec.display_str + ":")
             print("\tpaths:")
             for cpath in ["cc", "cxx", "f77", "fc"]:
                 print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
@@ -188,7 +188,7 @@ def compiler_list(args):
             os_str += "-%s" % target
         cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
         tty.hline(colorize(cname), char="-")
-        colify(reversed(sorted(c.spec for c in compilers)))
+        colify(reversed(sorted(c.spec.display_str for c in compilers)))
 
 
 def compiler(parser, args):
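
Several call sites here switch from sorting `CompilerSpec` objects to sorting their `display_str`. Sorting a precomputed display string sidesteps any need for an ordering defined on the spec objects themselves; a toy illustration (not Spack's classes, and the `@=` version format is only an assumption about how compiler specs print):

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class FakeCompilerSpec:
    name: str
    version: str

    @property
    def display_str(self) -> str:
        # Assumed display form, e.g. "gcc@=12.2.0"
        return f"{self.name}@={self.version}"

specs = [FakeCompilerSpec("gcc", "12.2.0"), FakeCompilerSpec("clang", "15.0.0")]
# Plain strings sort without defining __lt__ on the spec class:
print(sorted(s.display_str for s in specs))  # ['clang@=15.0.0', 'gcc@=12.2.0']
```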
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import collections
 import os
 import shutil
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 import re
 import urllib.parse
@@ -71,7 +69,7 @@ class {class_name}({base_class_name}):
 '''
 
 
-class BundlePackageTemplate(object):
+class BundlePackageTemplate:
     """
     Provides the default values to be used for a bundle package file template.
     """
@@ -122,7 +120,7 @@ def install(self, spec, prefix):
     url_line = '    url = "{url}"'
 
     def __init__(self, name, url, versions):
-        super(PackageTemplate, self).__init__(name, versions)
+        super().__init__(name, versions)
 
         self.url_def = self.url_line.format(url=url)
 
@@ -200,7 +198,7 @@ def __init__(self, name, url, *args, **kwargs):
             # Make it more obvious that we are renaming the package
             tty.msg("Changing package name from {0} to lua-{0}".format(name))
             name = "lua-{0}".format(name)
-        super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, *args, **kwargs)
 
 
 class MesonPackageTemplate(PackageTemplate):
@@ -308,7 +306,7 @@ def __init__(self, name, url, *args, **kwargs):
             tty.msg("Changing package name from {0} to rkt-{0}".format(name))
             name = "rkt-{0}".format(name)
         self.body_def = self.body_def.format(name[4:])
-        super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, *args, **kwargs)
 
 
 class PythonPackageTemplate(PackageTemplate):
@@ -400,7 +398,7 @@ def __init__(self, name, url, *args, **kwargs):
             + self.url_line
         )
 
-        super(PythonPackageTemplate, self).__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, *args, **kwargs)
 
 
 class RPackageTemplate(PackageTemplate):
@@ -439,7 +437,7 @@ def __init__(self, name, url, *args, **kwargs):
         if bioc:
             self.url_line = '    url = "{0}"\n' '    bioc = "{1}"'.format(url, r_name)
 
-        super(RPackageTemplate, self).__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, *args, **kwargs)
 
 
 class PerlmakePackageTemplate(PackageTemplate):
@@ -466,7 +464,7 @@ def __init__(self, name, *args, **kwargs):
             tty.msg("Changing package name from {0} to perl-{0}".format(name))
             name = "perl-{0}".format(name)
 
-        super(PerlmakePackageTemplate, self).__init__(name, *args, **kwargs)
+        super().__init__(name, *args, **kwargs)
 
 
 class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -499,7 +497,7 @@ def __init__(self, name, *args, **kwargs):
             tty.msg("Changing package name from {0} to octave-{0}".format(name))
             name = "octave-{0}".format(name)
 
-        super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)
+        super().__init__(name, *args, **kwargs)
 
 
 class RubyPackageTemplate(PackageTemplate):
@@ -527,7 +525,7 @@ def __init__(self, name, *args, **kwargs):
             tty.msg("Changing package name from {0} to ruby-{0}".format(name))
             name = "ruby-{0}".format(name)
 
-        super(RubyPackageTemplate, self).__init__(name, *args, **kwargs)
+        super().__init__(name, *args, **kwargs)
 
 
 class MakefilePackageTemplate(PackageTemplate):
@@ -572,7 +570,7 @@ def __init__(self, name, *args, **kwargs):
             tty.msg("Changing package name from {0} to py-{0}".format(name))
             name = "py-{0}".format(name)
 
-        super(SIPPackageTemplate, self).__init__(name, *args, **kwargs)
+        super().__init__(name, *args, **kwargs)
 
 
 templates = {
@@ -715,7 +713,7 @@ def __call__(self, stage, url):
             output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
         except ProcessError:
             output = ""
-        lines = output.split("\n")
+        lines = output.splitlines()
 
         # Determine the build system based on the files contained
         # in the archive.
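
The `splitlines()` change is subtly more than cosmetic: `split("\n")` leaves a trailing empty string when the input ends with a newline (as command output typically does), while `splitlines()` does not, and it also tolerates `\r\n` endings:

```python
output = "configure\nMakefile\n"
print(output.split("\n"))   # ['configure', 'Makefile', ''] -- spurious empty entry
print(output.splitlines())  # ['configure', 'Makefile']
```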
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 import platform
 import re
@@ -13,8 +13,6 @@
 It is up to the user to ensure binary compatibility between the deprecated
 installation and its deprecator.
 """
-from __future__ import print_function
-
 import argparse
 import os
 
@@ -86,6 +86,13 @@ def env_activate_setup_parser(subparser):
         const="bat",
         help="print bat commands to activate the environment",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print powershell commands to activate environment",
+    )
 
     view_options = subparser.add_mutually_exclusive_group()
     view_options.add_argument(
@@ -302,7 +309,7 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None
 
-    _env_create(
+    env = _env_create(
         args.create_env,
         init_file=args.envfile,
         dir=args.dir,
@@ -310,6 +317,9 @@ def env_create(args):
         keep_relative=args.keep_relative,
     )
 
+    # Generate views, only really useful for environments created from spack.lock files.
+    env.regenerate_views()
+
 
 def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
     """Create a new environment, with an optional yaml description.
@@ -408,7 +418,7 @@ def env_list(args):
     colify(color_names, indent=4)
 
 
-class ViewAction(object):
+class ViewAction:
     regenerate = "regenerate"
     enable = "enable"
     disable = "disable"
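
The added `--pwsh` flag follows the same `store_const` pattern as the existing shell flags: each option writes a different constant into a shared `shell` destination. A standalone sketch of that pattern (the mutually exclusive grouping is assumed to match the surrounding flags):

```python
import argparse

parser = argparse.ArgumentParser(prog="spack env activate")
shells = parser.add_mutually_exclusive_group()
for flag, const in [("--sh", "sh"), ("--csh", "csh"), ("--fish", "fish"),
                    ("--bat", "bat"), ("--pwsh", "pwsh")]:
    shells.add_argument(flag, action="store_const", dest="shell", const=const,
                        help=f"print {const} commands to activate the environment")

print(parser.parse_args(["--pwsh"]).shell)  # pwsh
```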
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import argparse
 import errno
 import os
@@ -79,6 +77,12 @@ def setup_parser(subparser):
     read_cray_manifest.add_argument(
         "--directory", default=None, help="specify a directory storing a group of manifest files"
     )
+    read_cray_manifest.add_argument(
+        "--ignore-default-dir",
+        action="store_true",
+        default=False,
+        help="ignore the default directory of manifest files",
+    )
     read_cray_manifest.add_argument(
         "--dry-run",
         action="store_true",
@@ -177,11 +181,16 @@ def external_read_cray_manifest(args):
         manifest_directory=args.directory,
         dry_run=args.dry_run,
         fail_on_error=args.fail_on_error,
+        ignore_default_dir=args.ignore_default_dir,
     )
 
 
 def _collect_and_consume_cray_manifest_files(
-    manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
+    manifest_file=None,
+    manifest_directory=None,
+    dry_run=False,
+    fail_on_error=False,
+    ignore_default_dir=False,
 ):
     manifest_files = []
     if manifest_file:
@@ -191,7 +200,7 @@ def _collect_and_consume_cray_manifest_files(
     if manifest_directory:
         manifest_dirs.append(manifest_directory)
 
-    if os.path.isdir(cray_manifest.default_path):
+    if not ignore_default_dir and os.path.isdir(cray_manifest.default_path):
         tty.debug(
             "Cray manifest path {0} exists: collecting all files to read.".format(
                 cray_manifest.default_path
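
`--ignore-default-dir` threads straight through to a keyword argument whose only job is to short-circuit the scan of the system-wide manifest directory. A reduced sketch of the control flow; the default path below is an assumption for illustration, not necessarily the value of `cray_manifest.default_path`:

```python
import os

DEFAULT_MANIFEST_DIR = "/opt/cray/pe/cpe-descriptive-manifest"  # assumed default location

def collect_manifest_dirs(manifest_directory=None, ignore_default_dir=False):
    dirs = []
    if manifest_directory:
        dirs.append(manifest_directory)
    # The new flag suppresses the implicit system-wide location:
    if not ignore_default_dir and os.path.isdir(DEFAULT_MANIFEST_DIR):
        dirs.append(DEFAULT_MANIFEST_DIR)
    return dirs

print(collect_manifest_dirs("/tmp/manifests", ignore_default_dir=True))  # ['/tmp/manifests']
```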
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import copy
 import sys
 
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import textwrap
 from itertools import zip_longest
 
@@ -73,7 +71,7 @@ def variant(s):
     return spack.spec.enabled_variant_color + s + plain_format
 
 
-class VariantFormatter(object):
+class VariantFormatter:
     def __init__(self, variants):
         self.variants = variants
         self.headers = ("Name [Default]", "When", "Allowed values", "Description")
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 import re
 from collections import defaultdict
@@ -102,7 +100,7 @@ def list_files(args):
     ]
 
 
-class LicenseError(object):
+class LicenseError:
     def __init__(self):
         self.error_counts = defaultdict(int)
 
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import division, print_function
-
 import argparse
 import fnmatch
 import json
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 
 import llnl.util.tty as tty
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 from collections import defaultdict
 
@@ -49,9 +49,8 @@ def setup_parser(subparser):
         "-g",
         "--git-installer-verbosity",
         default="",
-        choices=set(["SILENT", "VERYSILENT"]),
-        help="Level of verbosity provided by bundled Git Installer.\
-            Default is fully verbose",
+        choices=["SILENT", "VERYSILENT"],
+        help="Level of verbosity provided by bundled Git Installer. Default is fully verbose",
         required=False,
         action="store",
         dest="git_verbosity",
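
Replacing `choices=set([...])` with a list is a real fix, not just style: argparse renders `choices` into usage and help text by iterating the container, and a set's iteration order is arbitrary, so the displayed help could vary between runs. A list keeps it deterministic:

```python
import argparse

parser = argparse.ArgumentParser()
# A list gives stable ordering in the rendered help; a set does not guarantee it.
parser.add_argument("-g", "--git-installer-verbosity", default="",
                    choices=["SILENT", "VERYSILENT"], dest="git_verbosity")
print(parser.parse_args(["-g", "SILENT"]).git_verbosity)  # SILENT
```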
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import sys
 
 from llnl.util import tty
@@ -116,21 +116,23 @@ def one_spec_or_raise(specs):
 
 
 def check_module_set_name(name):
-    modules_config = spack.config.get("modules")
-    valid_names = set(
-        [
-            key
-            for key, value in modules_config.items()
-            if isinstance(value, dict) and value.get("enable", [])
-        ]
-    )
-    if "enable" in modules_config and modules_config["enable"]:
-        valid_names.add("default")
-
-    if name not in valid_names:
-        msg = "Cannot use invalid module set %s." % name
-        msg += " Valid module set names are %s" % list(valid_names)
-        raise spack.config.ConfigError(msg)
+    modules = spack.config.get("modules")
+    if name != "prefix_inspections" and name in modules:
+        return
+
+    names = [k for k in modules if k != "prefix_inspections"]
+
+    if not names:
+        raise spack.config.ConfigError(
+            f"Module set configuration is missing. Cannot use module set '{name}'"
+        )
+
+    pretty_names = "', '".join(names)
+
+    raise spack.config.ConfigError(
+        f"Cannot use invalid module set '{name}'.",
+        f"Valid module set names are: '{pretty_names}'.",
+    )
 
 
 _missing_modules_warning = (
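
The rewritten `check_module_set_name` no longer requires an `enable:` entry: any key under `modules:` other than `prefix_inspections` now counts as a valid module set, and failures raise a two-part `ConfigError` (a message plus a detail line). A minimal re-creation of the validation logic using a plain exception:

```python
def check_module_set_name(name: str, modules: dict) -> None:
    # Any top-level key except "prefix_inspections" names a module set.
    if name != "prefix_inspections" and name in modules:
        return

    names = [k for k in modules if k != "prefix_inspections"]
    if not names:
        raise ValueError(f"Module set configuration is missing. Cannot use module set '{name}'")

    pretty_names = "', '".join(names)
    raise ValueError(f"Cannot use invalid module set '{name}'. Valid names are: '{pretty_names}'.")

check_module_set_name("default", {"default": {"enable": ["tcl"]}})  # passes silently
```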
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import itertools
 import os
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import code
 import os
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 import sys
 
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import os
 
 import llnl.util.tty as tty
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from __future__ import print_function
-
 import argparse
 import re
 import sys
@@ -44,7 +42,11 @@ def setup_parser(subparser):
     )
 
     # Below are arguments w.r.t. spec display (like spack spec)
-    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
+    arguments.add_common_arguments(subparser, ["long", "very_long"])
+
+    install_status_group = subparser.add_mutually_exclusive_group()
+    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
+
     subparser.add_argument(
         "-y",
         "--yaml",
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import sys

 import llnl.util.lang as lang
@@ -31,7 +29,11 @@ def setup_parser(subparser):
     for further documentation regarding the spec syntax, see:
         spack help --spec
     """
-    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
+    arguments.add_common_arguments(subparser, ["long", "very_long"])
+
+    install_status_group = subparser.add_mutually_exclusive_group()
+    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
+
     format_group = subparser.add_mutually_exclusive_group()
     format_group.add_argument(
         "-y",
@@ -60,7 +60,7 @@ def is_package(f):


 #: decorator for adding tools to the list
-class tool(object):
+class tool:
     def __init__(self, name, required=False):
         self.name = name
         self.required = required
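Dropping the explicit (object) base is purely cosmetic: every Python 3 class is new-style and inherits from object implicitly, so this hunk (and the similar class-statement hunks below) changes no behavior:

    class A(object):
        pass

    class B:
        pass

    assert A.__mro__ == (A, object)
    assert B.__mro__ == (B, object)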
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import argparse
 import fnmatch
 import os
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import os.path
 import shutil

@@ -25,7 +23,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.19"
+tutorial_branch = "releases/v0.20"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import sys
 from typing import Dict, List, Optional

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import division, print_function
-
 import argparse
 import collections
 import io
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import division, print_function
-
 import urllib.parse
 from collections import defaultdict

@@ -290,7 +288,7 @@ def url_stats(args):
     # dictionary of issue type -> package -> descriptions
     issues = defaultdict(lambda: defaultdict(lambda: []))

-    class UrlStats(object):
+    class UrlStats:
         def __init__(self):
             self.total = 0
             self.schemes = defaultdict(lambda: 0)
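Context worth noting in this hunk: issues is a doubly nested defaultdict keyed by issue type, then by package, with an empty list at each leaf, so call sites can append without initializing anything. A quick illustration with made-up data:

    from collections import defaultdict

    issues = defaultdict(lambda: defaultdict(lambda: []))
    issues["http-to-https"]["zlib"].append("homepage uses http")  # no KeyError on first touch
    print(dict(issues["http-to-https"]))  # {'zlib': ['homepage uses http']}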
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from __future__ import print_function
-
 import argparse

 import llnl.util.tty as tty
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-
 import sys

 import llnl.util.tty as tty
@@ -189,7 +189,7 @@ def in_system_subdirectory(path):
     return any(path_contains_subdirectory(path, x) for x in system_dirs)


-class Compiler(object):
+class Compiler:
     """This class encapsulates a Spack "compiler", which includes C,
     C++, and Fortran compilers. Subclasses should implement
     support for specific compilers, their possible names, arguments,
|
|||||||
def __init__(self, compiler, paths):
|
def __init__(self, compiler, paths):
|
||||||
msg = "Compiler '%s' has executables that are missing" % compiler.spec
|
msg = "Compiler '%s' has executables that are missing" % compiler.spec
|
||||||
msg += " or are not executable: %s" % paths
|
msg += " or are not executable: %s" % paths
|
||||||
super(CompilerAccessError, self).__init__(msg)
|
super().__init__(msg)
|
||||||
|
|
||||||
|
|
||||||
class InvalidCompilerError(spack.error.SpackError):
|
class InvalidCompilerError(spack.error.SpackError):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super(InvalidCompilerError, self).__init__("Compiler has no executables.")
|
super().__init__("Compiler has no executables.")
|
||||||
|
|
||||||
|
|
||||||
class UnsupportedCompilerFlag(spack.error.SpackError):
|
class UnsupportedCompilerFlag(spack.error.SpackError):
|
||||||
def __init__(self, compiler, feature, flag_name, ver_string=None):
|
def __init__(self, compiler, feature, flag_name, ver_string=None):
|
||||||
super(UnsupportedCompilerFlag, self).__init__(
|
super().__init__(
|
||||||
"{0} ({1}) does not support {2} (as compiler.{3}).".format(
|
"{0} ({1}) does not support {2} (as compiler.{3}).".format(
|
||||||
compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
|
compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
|
||||||
),
|
),
|
||||||
|
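Inside a method, Python 3 resolves the zero-argument super() to the same bound object as the explicit super(Class, self), via the implicit __class__ cell, so these rewrites (repeated in the remaining hunks) are behavior-preserving. A small equivalence check:

    class Base:
        def __init__(self, msg):
            self.msg = msg

    class OldStyle(Base):
        def __init__(self):
            super(OldStyle, self).__init__("same")  # Python 2 compatible spelling

    class NewStyle(Base):
        def __init__(self):
            super().__init__("same")  # Python 3 zero-argument form

    assert OldStyle().msg == NewStyle().msg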
@@ -37,7 +37,6 @@
     "implicit_rpaths",
     "extra_rpaths",
 ]
-_cache_config_file = []

 # TODO: Caches at module level make it difficult to mock configurations in
 # TODO: unit tests. It might be worth reworking their implementation.
@@ -112,36 +111,26 @@ def _to_dict(compiler):
 def get_compiler_config(scope=None, init_config=True):
     """Return the compiler configuration for the specified architecture."""

-    def init_compiler_config():
-        """Compiler search used when Spack has no compilers."""
-        compilers = find_compilers()
-        compilers_dict = []
-        for compiler in compilers:
-            compilers_dict.append(_to_dict(compiler))
-        spack.config.set("compilers", compilers_dict, scope=scope)
-
-    config = spack.config.get("compilers", scope=scope)
-    # Update the configuration if there are currently no compilers
-    # configured. Avoid updating automatically if there ARE site
-    # compilers configured but no user ones.
-    if not config and init_config:
-        if scope is None:
-            # We know no compilers were configured in any scope.
-            init_compiler_config()
-            config = spack.config.get("compilers", scope=scope)
-        elif scope == "user":
-            # Check the site config and update the user config if
-            # nothing is configured at the site level.
-            site_config = spack.config.get("compilers", scope="site")
-            sys_config = spack.config.get("compilers", scope="system")
-            if not site_config and not sys_config:
-                init_compiler_config()
-                config = spack.config.get("compilers", scope=scope)
-        return config
-    elif config:
-        return config
-    else:
-        return []  # Return empty list which we will later append to.
+    config = spack.config.get("compilers", scope=scope) or []
+    if config or not init_config:
+        return config
+
+    merged_config = spack.config.get("compilers")
+    if merged_config:
+        return config
+
+    _init_compiler_config(scope=scope)
+    config = spack.config.get("compilers", scope=scope)
+    return config
+
+
+def _init_compiler_config(*, scope):
+    """Compiler search used when Spack has no compilers."""
+    compilers = find_compilers()
+    compilers_dict = []
+    for compiler in compilers:
+        compilers_dict.append(_to_dict(compiler))
+    spack.config.set("compilers", compilers_dict, scope=scope)


 def compiler_config_files():
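The refactor flattens the nested scope checks into three early returns: return what the requested scope already holds, skip detection when any scope is populated, and only then bootstrap through the new module-level _init_compiler_config helper. A stubbed sketch of that control flow (the dict-backed store and detect helper are inventions for illustration, not Spack APIs):

    store = {"user": [], "site": []}  # stand-in for spack.config scopes

    def detect():
        return [{"compiler": {"spec": "gcc@12.2.0"}}]  # stand-in for find_compilers()

    def get_compiler_config(scope="user", init_config=True):
        config = store.get(scope) or []
        if config or not init_config:  # 1. scope already populated, or caller opted out
            return config
        if any(store.values()):        # 2. another scope has compilers: do not re-detect
            return config
        store[scope] = detect()        # 3. nothing configured anywhere: detect once
        return store[scope]

    print(get_compiler_config())  # runs detection, returns the stub gcc entry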
@@ -165,52 +154,65 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
     compiler_config = get_compiler_config(scope, init_config)
     for compiler in compilers:
         compiler_config.append(_to_dict(compiler))
-    global _cache_config_file
-    _cache_config_file = compiler_config
     spack.config.set("compilers", compiler_config, scope=scope)


 @_auto_compiler_spec
 def remove_compiler_from_config(compiler_spec, scope=None):
-    """Remove compilers from the config, by spec.
+    """Remove compilers from configuration by spec.
+
+    If scope is None, all the scopes are searched for removal.

     Arguments:
-        compiler_specs: a list of CompilerSpec objects.
-        scope: configuration scope to modify.
+        compiler_spec: compiler to be removed
+        scope: configuration scope to modify
     """
-    # Need a better way for this
-    global _cache_config_file
+    candidate_scopes = [scope]
+    if scope is None:
+        candidate_scopes = spack.config.config.scopes.keys()
+
+    removal_happened = False
+    for current_scope in candidate_scopes:
+        removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
+
+    return removal_happened
+
+
+def _remove_compiler_from_scope(compiler_spec, scope):
+    """Removes a compiler from a specific configuration scope.
+
+    Args:
+        compiler_spec: compiler to be removed
+        scope: configuration scope under consideration
+
+    Returns:
+        True if one or more compiler entries were actually removed, False otherwise
+    """
+    assert scope is not None, "a specific scope is needed when calling this function"
     compiler_config = get_compiler_config(scope)
-    config_length = len(compiler_config)
-
     filtered_compiler_config = [
-        comp
-        for comp in compiler_config
+        compiler_entry
+        for compiler_entry in compiler_config
         if not spack.spec.parse_with_version_concrete(
-            comp["compiler"]["spec"], compiler=True
+            compiler_entry["compiler"]["spec"], compiler=True
         ).satisfies(compiler_spec)
     ]

-    # Update the cache for changes
-    _cache_config_file = filtered_compiler_config
-    if len(filtered_compiler_config) == config_length:  # No items removed
-        CompilerSpecInsufficientlySpecificError(compiler_spec)
-    spack.config.set("compilers", filtered_compiler_config, scope=scope)
+    if len(filtered_compiler_config) == len(compiler_config):
+        return False
+
+    # We need to preserve the YAML type for comments, hence we are copying the
+    # items in the list that has just been retrieved
+    compiler_config[:] = filtered_compiler_config
+    spack.config.set("compilers", compiler_config, scope=scope)
+    return True


 def all_compilers_config(scope=None, init_config=True):
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    # Get compilers for this architecture.
-    # Create a cache of the config file so we don't load all the time.
-    global _cache_config_file
-    if not _cache_config_file:
-        _cache_config_file = get_compiler_config(scope, init_config)
-        return _cache_config_file
-    else:
-        return _cache_config_file
+    return get_compiler_config(scope, init_config)


 def all_compiler_specs(scope=None, init_config=True):
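Two details of this rewrite are easy to miss: with scope=None the removal now iterates every configuration scope, OR-ing the per-scope results together with |=, and the surviving entries are written back via slice assignment (compiler_config[:] = ...) instead of rebinding the name, so the list object returned by the config layer keeps its concrete type, and with it any YAML comments ruamel attaches. The identity-preserving part, shown with a plain list:

    entries = ["gcc@12", "clang@15", "nvhpc@23"]
    alias = entries  # in Spack this would be ruamel.yaml's comment-carrying list subclass
    entries[:] = [e for e in entries if e != "clang@15"]
    assert alias is entries
    assert entries == ["gcc@12", "nvhpc@23"]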
@@ -367,7 +369,7 @@ def compiler_specs_for_arch(arch_spec, scope=None):
     return [c.spec for c in compilers_for_arch(arch_spec, scope)]


-class CacheReference(object):
+class CacheReference:
     """This acts as a hashable reference to any object (regardless of whether
     the object itself is hashable) and also prevents the object from being
     garbage-collected (so if two CacheReference objects are equal, they
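Per the docstring shown here, CacheReference lets any object, hashable or not, serve as a dictionary key by identity, while holding a strong reference that keeps the wrapped object alive. A minimal sketch consistent with that contract (not necessarily Spack's exact implementation):

    class CacheReference:
        def __init__(self, val):
            self.val = val  # strong reference prevents garbage collection

        def __hash__(self):
            return id(self.val)  # identity-based, works for unhashable values

        def __eq__(self, other):
            return isinstance(other, CacheReference) and self.val is other.val

    key_obj = ["lists", "are", "unhashable"]
    cache = {CacheReference(key_obj): "computed result"}
    print(cache[CacheReference(key_obj)])  # 'computed result'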
@@ -818,7 +820,7 @@ def name_matches(name, name_list):

 class InvalidCompilerConfigurationError(spack.error.SpackError):
     def __init__(self, compiler_spec):
-        super(InvalidCompilerConfigurationError, self).__init__(
+        super().__init__(
             'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
             "Compiler configuration must contain entries for all compilers: %s"
             % _path_instance_vars,
@@ -827,19 +829,17 @@ def __init__(self, compiler_spec):

 class NoCompilersError(spack.error.SpackError):
     def __init__(self):
-        super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+        super().__init__("Spack could not find any compilers!")


 class UnknownCompilerError(spack.error.SpackError):
     def __init__(self, compiler_name):
-        super(UnknownCompilerError, self).__init__(
-            "Spack doesn't support the requested compiler: {0}".format(compiler_name)
-        )
+        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))


 class NoCompilerForSpecError(spack.error.SpackError):
     def __init__(self, compiler_spec, target):
-        super(NoCompilerForSpecError, self).__init__(
+        super().__init__(
             "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
         )

@@ -858,11 +858,9 @@ def __init__(self, compiler_spec, arch_spec):
             + " in the following files:\n\t"
             + "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
         )
-        super(CompilerDuplicateError, self).__init__(msg)
+        super().__init__(msg)


 class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
     def __init__(self, compiler_spec):
-        super(CompilerSpecInsufficientlySpecificError, self).__init__(
-            "Multiple compilers satisfy spec %s" % compiler_spec
-        )
+        super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)
@@ -132,7 +132,7 @@ def setup_custom_environment(self, pkg, env):
         the 'DEVELOPER_DIR' environment variables to cause the xcrun and
         related tools to use this Xcode.app.
         """
-        super(AppleClang, self).setup_custom_environment(pkg, env)
+        super().setup_custom_environment(pkg, env)

         if not pkg.use_xcode:
             # if we do it for all packages, we get into big troubles with MPI:
@@ -12,7 +12,7 @@ class Cce(Compiler):
     """Cray compiler environment compiler."""

     def __init__(self, *args, **kwargs):
-        super(Cce, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # For old cray compilers on module based systems we replace
         # ``version_argument`` with the old value. Cannot be a property
         # as the new value is used in classmethods for path-based detection
Some files were not shown because too many files have changed in this diff.