Compare commits: features/o... revert-363 (628 commits)
.git-blame-ignore-revs
@@ -1,3 +1,5 @@
# .git-blame-ignore-revs
- # Formatted entire codebase with black
+ # Formatted entire codebase with black 23
+ 603569e321013a1a63a637813c94c2834d0a0023
+ # Formatted entire codebase with black 22
f52f6e99dbf1131886a80112b8c79dfc414afb7c
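The revisions listed in this file are meant to be skipped by ``git blame``; a minimal sketch of wiring that up locally (standard git configuration, not part of this change, and the blamed file is just an example):

.. code-block:: console

   $ git config blame.ignoreRevsFile .git-blame-ignore-revs
   $ git blame lib/spack/spack/spec.py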
.gitattributes (1 change, vendored)
@@ -1,3 +1,4 @@
*.py diff=python
*.lp linguist-language=Prolog
lib/spack/external/* linguist-vendored
+ *.bat text eol=crlf
.github/workflows/audit.yaml (2 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
package-audits:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
with:
python-version: ${{inputs.python_version}}
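The workflow changes in this comparison all follow one pattern: each third-party action is pinned to a full commit SHA, with the human-readable tag kept in a trailing comment, and an update simply replaces the SHA. A generic sketch of that pattern (the action name, SHA, and tag below are placeholders, not taken from this diff):

.. code-block:: yaml

   steps:
     # Pin actions by immutable commit SHA; record the tag in a comment.
     - uses: example-org/example-action@0123456789abcdef0123456789abcdef01234567 # @v1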
.github/workflows/bootstrap.yml (22 changes, vendored)
@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
- name: Bootstrap clingo
run: |
set -ex
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup repo
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
.github/workflows/build-containers.yml (4 changes, vendored)
@@ -50,7 +50,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2

- name: Set Container Tag Normal (Nightly)
run: |
@@ -89,7 +89,7 @@ jobs:
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1
+ uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

- name: Log in to GitHub Container Registry
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
.github/workflows/ci.yaml (2 changes, vendored)
@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
.github/workflows/unit_tests.yaml (10 changes, vendored)
@@ -47,7 +47,7 @@ jobs:
on_develop: false

steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -94,7 +94,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -151,7 +151,7 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -185,7 +185,7 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
.github/workflows/valid-style.yml (6 changes, vendored)
@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
with:
python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
- python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
+ python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
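The bump from black 22.12.0 to 23.1.0 can be reproduced locally before pushing; a rough sketch, assuming the ``spack style`` command available in this version of Spack:

.. code-block:: console

   $ python3 -m pip install black==23.1.0 isort flake8 mypy
   $ spack style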
.github/workflows/windows_python.yml (8 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -39,7 +39,7 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -63,7 +63,7 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -87,7 +87,7 @@ jobs:
# git config --global core.symlinks false
# shell:
# powershell
- # - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+ # - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
# with:
# fetch-depth: 0
# - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
CHANGELOG.md (25 changes)
@@ -1,3 +1,28 @@
+ # v0.19.1 (2023-02-07)
+
+ ### Spack Bugfixes
+
+ * `buildcache create`: make "file exists" less verbose (#35019)
+ * `spack mirror create`: don't change paths to urls (#34992)
+ * Improve error message for requirements (#33988)
+ * uninstall: fix accidental cubic complexity (#34005)
+ * scons: fix signature for `install_args` (#34481)
+ * Fix `combine_phase_logs` text encoding issues (#34657)
+ * Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
+ * PackageBase should not define builder legacy attributes (#33942)
+ * Forward lookup of the "run_tests" attribute (#34531)
+ * Bugfix for timers (#33917, #33900)
+ * Fix path handling in prefix inspections (#35318)
+ * Fix libtool filter for Fujitsu compilers (#34916)
+ * Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
+ * FileCache: delete the new cache file on exception (#34623)
+ * Propagate exceptions from Spack python console (#34547)
+ * Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
+ * Various CI fixes (#33953, #34560, #34560, #34828)
+ * Docs: remove monitors and analyzers, typos (#34358, #33926)
+ * bump release version for tutorial command (#33859)
+
+
# v0.19.0 (2022-11-11)

`v0.19.0` is a major feature release.
@@ -72,6 +72,7 @@ config:
root: $TMP_DIR/install
misc_cache: $$user_cache_path/cache
source_cache: $$user_cache_path/source
+ environments_root: $TMP_DIR/envs
EOF
cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <<EOF
bootstrap:
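The ``environments_root`` setting exercised above can also be set in a user's ``config.yaml``; a minimal sketch (the path is a placeholder):

.. code-block:: yaml

   config:
     # Store managed (named) environments outside the Spack prefix.
     environments_root: ~/spack-environments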
@@ -226,7 +226,7 @@ for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
exit /b 0

:: set module system roots
- :_sp_multi_pathadd
+ :_sp_multi_pathadd
for %%I in (%~2) do (
for %%Z in (%_sp_compatible_sys_types%) do (
:pathadd "%~1" "%%I\%%Z"
@@ -13,16 +13,18 @@ concretizer:
# Whether to consider installed packages or packages from buildcaches when
# concretizing specs. If `true`, we'll try to use as many installs/binaries
# as possible, rather than building. If `false`, we'll always give you a fresh
- # concretization.
- reuse: true
+ # concretization. If `dependencies`, we'll only reuse dependencies but
+ # give you a fresh concretization for your root specs.
+ reuse: dependencies
# Options that tune which targets are considered for concretization. The
# concretization process is very sensitive to the number targets, and the time
# needed to reach a solution increases noticeably with the number of targets
# considered.
targets:
- # Determine whether we want to target specific or generic microarchitectures.
- # An example of the first kind might be for instance "skylake" or "bulldozer",
- # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
+ # Determine whether we want to target specific or generic
+ # microarchitectures. Valid values are: "microarchitectures" or "generic".
+ # An example of "microarchitectures" would be "skylake" or "bulldozer",
+ # while an example of "generic" would be "aarch64" or "x86_64_v4".
granularity: microarchitectures
# If "false" allow targets that are incompatible with the current host (for
# instance concretize with target "icelake" while running on "haswell").
@@ -33,4 +35,4 @@ concretizer:
# environments can always be activated. When "false" perform concretization separately
# on each root spec, allowing different versions and variants of the same package in
# an environment.
- unify: true
+ unify: true
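The same setting can be overridden in a user scope or in an environment; a minimal sketch using only the values named in the comments above:

.. code-block:: yaml

   concretizer:
     # Accepted values per the comments above: true, false, or dependencies.
     reuse: dependencies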
@@ -81,6 +81,10 @@ config:
source_cache: $spack/var/spack/cache

+ ## Directory where spack managed environments are created and stored
+ # environments_root: $spack/var/spack/environments
+
# Cache directory for miscellaneous files, like the package index.
# This can be purged with `spack clean --misc-cache`
misc_cache: $user_cache_path/cache
@@ -181,7 +185,7 @@ config:
# when Spack needs to manage its own package metadata and all operations are
# expected to complete within the default time limit. The timeout should
# therefore generally be left untouched.
- db_lock_timeout: 3
+ db_lock_timeout: 60

# How long to wait when attempting to modify a package (e.g. to install it).
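Either default can be overridden in a higher-precedence scope without editing this file; for example, using ``spack config add`` (the value shown is arbitrary):

.. code-block:: console

   $ spack config add config:db_lock_timeout:120
   $ spack config get config | grep db_lock_timeout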
@@ -46,7 +46,7 @@ modules:

tcl:
all:
- autoload: none
+ autoload: direct

# Default configurations if lmod is enabled
lmod:
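Flattened by the diff view, the full nesting of that setting in ``modules.yaml`` looks roughly like this (a sketch mirroring the lines above):

.. code-block:: yaml

   modules:
     tcl:
       all:
         # Accepted values are none, direct, or all.
         autoload: direct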
@@ -28,7 +28,7 @@ packages:
gl: [glx, osmesa]
glu: [mesa-glu, openglu]
golang: [go, gcc]
- go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
+ go-or-gccgo-bootstrap: [go-bootstrap, gcc]
iconv: [libiconv]
ipp: [intel-ipp]
java: [openjdk, jdk, ibm-java]
lib/spack/docs/.gitignore (1 change, vendored)
@@ -5,3 +5,4 @@ llnl*.rst
_build
.spack-env
spack.lock
+ _spack_root
@@ -942,7 +942,7 @@ first ``libelf`` above, you would run:

$ spack load /qmm4kso

- To see which packages that you have loaded to your enviornment you would
+ To see which packages that you have loaded to your environment you would
use ``spack find --loaded``.

.. code-block:: console
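For reference, the commands referenced in the corrected lines are simply:

.. code-block:: console

   $ spack load /qmm4kso
   $ spack find --loaded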
@@ -18,7 +18,7 @@ your Spack mirror and then downloaded and installed by others.

Whenever a mirror provides prebuilt packages, Spack will take these packages
into account during concretization and installation, making ``spack install``
- signficantly faster.
+ significantly faster.

.. note::
@@ -28,11 +28,14 @@ This package provides the following variants:

* **cuda_arch**

- This variant supports the optional specification of the architecture.
+ This variant supports the optional specification of one or multiple architectures.
Valid values are maintained in the ``cuda_arch_values`` property and
are the numeric character equivalent of the compute capability version
(e.g., '10' for version 1.0). Each provided value affects associated
``CUDA`` dependencies and compiler conflicts.

+ The variant builds both PTX code for the _virtual_ architecture
+ (e.g. ``compute_10``) and binary code for the _real_ architecture (e.g. ``sm_10``).
+
GPUs and their compute capability versions are listed at
https://developer.nvidia.com/cuda-gpus .
@@ -124,7 +124,7 @@ Using oneAPI Tools Installed by Spack
=====================================

Spack can be a convenient way to install and configure compilers and
- libaries, even if you do not intend to build a Spack package. If you
+ libraries, even if you do not intend to build a Spack package. If you
want to build a Makefile project using Spack-installed oneAPI compilers,
then use spack to configure your environment::
@@ -397,7 +397,7 @@ for specifics and examples for ``packages.yaml`` files.

.. If your system administrator did not provide modules for pre-installed Intel
tools, you could do well to ask for them, because installing multiple copies
- of the Intel tools, as is wont to happen once Spack is in the picture, is
+ of the Intel tools, as is won't to happen once Spack is in the picture, is
bound to stretch disk space and patience thin. If you *are* the system
administrator and are still new to modules, then perhaps it's best to follow
the `next section <Installing Intel tools within Spack_>`_ and install the tools
@@ -653,7 +653,7 @@ follow `the next section <intel-install-libs_>`_ instead.
* If you specified a custom variant (for example ``+vtune``) you may want to add this as your
preferred variant in the packages configuration for the ``intel-parallel-studio`` package
as described in :ref:`package-preferences`. Otherwise you will have to specify
- the variant everytime ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
+ the variant every time ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
implementation to avoid pulling in a different variant.

* To set the Intel compilers for default use in Spack, instead of the usual ``%gcc``,
@@ -366,7 +366,7 @@ If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
it uses the meson build system. Meson uses the default
``pyproject.toml`` keys to list dependencies.

- See https://meson-python.readthedocs.io/en/latest/usage/start.html
+ See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html
for more information.

"""
@@ -582,7 +582,7 @@ libraries. Make sure not to add modules/packages containing the word
"test", as these likely won't end up in the installation directory,
or may require test dependencies like pytest to be installed.

- Instead of defining the ``import_modules`` explicity, only the subset
+ Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the
@@ -58,9 +58,7 @@ Testing
``WafPackage`` also provides ``test`` and ``installtest`` methods,
which are run after the ``build`` and ``install`` phases, respectively.
By default, these phases do nothing, but you can override them to
- run package-specific unit tests. For example, the
- `py-py2cairo <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-py2cairo/package.py>`_
- package uses:
+ run package-specific unit tests.

.. code-block:: python
@@ -89,6 +89,7 @@
# Enable todo items
todo_include_todos = True

+
#
# Disable duplicate cross-reference warnings.
#
@@ -353,9 +354,7 @@ class SpackStyle(DefaultStyle):

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
- latex_documents = [
- ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
- ]
+ latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
@@ -402,7 +401,7 @@ class SpackStyle(DefaultStyle):
"Spack",
"One line description of project.",
"Miscellaneous",
- ),
+ )
]

# Documents to append as an appendix to all manuals.
@@ -418,6 +417,4 @@ class SpackStyle(DefaultStyle):
# -- Extension configuration -------------------------------------------------

# sphinx.ext.intersphinx
- intersphinx_mapping = {
- "python": ("https://docs.python.org/3", None),
- }
+ intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
@@ -222,7 +222,7 @@ and location. (See the *Configuration settings* section of ``man
ccache`` to learn more about the default settings and how to change
them). Please note that we currently disable ccache's ``hash_dir``
feature to avoid an issue with the stage directory (see
- https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
+ https://github.com/spack/spack/pull/3761#issuecomment-294352232).

-----------------------
``shared_linking:type``

@@ -227,6 +227,9 @@ You can get the name to use for ``<platform>`` by running ``spack arch
--platform``. The system config scope has a ``<platform>`` section for
sites at which ``/etc`` is mounted on multiple heterogeneous machines.

+
+ .. _config-scope-precedence:
+
----------------
Scope Precedence
----------------
@@ -239,6 +242,11 @@ lower-precedence settings. Completely ignoring higher-level configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).

+ There are also special notations for string concatenation and precendense override.
+ Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical
+ to the default behavior. Using the ``-:`` works similarly, but for *appending* values.
+ :ref:`config-prepend-append`
+
^^^^^^^^^^^
Simple keys
^^^^^^^^^^^
@@ -279,6 +287,47 @@ command:
- ~/.spack/stage

+
+ .. _config-prepend-append:
+
+ ^^^^^^^^^^^^^^^^^^^^
+ String Concatenation
+ ^^^^^^^^^^^^^^^^^^^^
+
+ Above, the user ``config.yaml`` *completely* overrides specific settings in the
+ default ``config.yaml``. Sometimes, it is useful to add a suffix/prefix
+ to a path or name. To do this, you can use the ``-:`` notation for *append*
+ string concatenation at the end of a key in a configuration file. For example:
+
+ .. code-block:: yaml
+    :emphasize-lines: 1
+    :caption: ~/.spack/config.yaml
+
+    config:
+      install_tree-: /my/custom/suffix/
+
+ Spack will then append to the lower-precedence configuration under the
+ ``install_tree-:`` section:
+
+ .. code-block:: console
+
+    $ spack config get config
+    config:
+      install_tree: /some/other/directory/my/custom/suffix
+      build_stage:
+        - $tempdir/$user/spack-stage
+        - ~/.spack/stage
+
+ Similarly, ``+:`` can be used to *prepend* to a path or name:
+
+ .. code-block:: yaml
+    :emphasize-lines: 1
+    :caption: ~/.spack/config.yaml
+
+    config:
+      install_tree+: /my/custom/suffix/
+
+
.. _config-overrides:

^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -118,7 +118,7 @@ make another change, test that change, etc. We use `pytest
<http://pytest.org/>`_ as our tests framework, and these types of
arguments are just passed to the ``pytest`` command underneath. See `the
pytest docs
- <http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
+ <https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
for more details on test selection syntax.

``spack unit-test`` has a few special options that can help you
@@ -147,7 +147,7 @@ you want to know about. For example, to see just the tests in

You can also combine any of these options with a ``pytest`` keyword
search. See the `pytest usage docs
- <https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests>`_:
+ <https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
for more details on test selection syntax. For example, to see the names of all tests that have "spec"
or "concretize" somewhere in their names:
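Because these arguments are forwarded to pytest, the keyword search mentioned above can be passed straight through; for example (the keyword expression is arbitrary):

.. code-block:: console

   $ spack unit-test -k "spec or concretize"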
@@ -472,7 +472,7 @@ use my new hook as follows:

.. code-block:: python

   def post_log_write(message, level):
-      """Do something custom with the messsage and level every time we write
+      """Do something custom with the message and level every time we write
       to the log
       """
       print('running post_log_write!')
@@ -58,9 +58,9 @@ Using Environments
Here we follow a typical use case of creating, concretizing,
installing and loading an environment.

- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- Creating a named Environment
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Creating a managed Environment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

An environment is created by:

@@ -72,7 +72,8 @@ Spack then creates the directory ``var/spack/environments/myenv``.

.. note::

- All named environments are stored in the ``var/spack/environments`` folder.
+ All managed environments by default are stored in the ``var/spack/environments`` folder.
+ This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.

In the ``var/spack/environments/myenv`` directory, Spack creates the
file ``spack.yaml`` and the hidden directory ``.spack-env``.
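The typical use case referenced above starts from the standard environment commands; for example (the environment name and spec are placeholders):

.. code-block:: console

   $ spack env create myenv
   $ spack env activate myenv
   $ spack add zlib
   $ spack install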
@@ -21,7 +21,7 @@ be present on the machine where Spack is run:
:header-rows: 1

These requirements can be easily installed on most modern Linux systems;
- on macOS, the Command Line Tools package is required, and a full XCode suite
+ on macOS, the Command Line Tools package is required, and a full XCode suite
may be necessary for some packages such as Qt and apple-gl. Spack is designed
to run on HPC platforms like Cray. Not all packages should be expected
to work on all platforms.
@@ -1506,7 +1506,7 @@ Spack On Windows

Windows support for Spack is currently under development. While this work is still in an early stage,
it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
- you through the steps needed to install Spack and start running it on a fresh Windows machine.
+ you through the steps needed to install Spack and start running it on a fresh Windows machine.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 1: Install prerequisites
@@ -1516,7 +1516,7 @@ To use Spack on Windows, you will need the following packages:

Required:
* Microsoft Visual Studio
- * Python
+ * Python
* Git

Optional:
@@ -1547,8 +1547,8 @@ Intel Fortran
"""""""""""""

For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
- The suite is free to download from Intel's website, located at
- https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
+ The suite is free to download from Intel's website, located at
+ https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html.
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
compiler's (ifort's) frontend and runtime libraries by using LLVM.

@@ -1597,8 +1597,8 @@ in a Windows CMD prompt.

.. note::
   If you chose to install Spack into a directory on Windows that is set up to require Administrative
-   Privleges, Spack will require elevated privleges to run.
-   Administrative Privleges can be denoted either by default such as
+   Privileges, Spack will require elevated privileges to run.
+   Administrative Privileges can be denoted either by default such as
   ``C:\Program Files``, or aministrator applied administrative restrictions
   on a directory that spack installs files to such as ``C:\Users``

@@ -1694,7 +1694,7 @@ Spack console via:

   spack install cpuinfo

- If in the previous step, you did not have CMake or Ninja installed, running the command above should boostrap both packages
+ If in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages

"""""""""""""""""""""""""""
Windows Compatible Packages
@@ -13,7 +13,7 @@ The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is often embraced also by
individual programmers on their development machines. To support this
common practice Spack integrates with `Environment Modules
- <http://modules.sourceforge.net/>`_ and `LMod
+ <http://modules.sourceforge.net/>`_ and `Lmod
<http://lmod.readthedocs.io/en/latest/>`_ by providing post-install hooks
that generate module files and commands to manipulate them.

@@ -26,8 +26,8 @@ Using module files via Spack
----------------------------

If you have installed a supported module system you should be able to
- run either ``module avail`` or ``use -l spack`` to see what module
- files have been installed. Here is sample output of those programs,
+ run ``module avail`` to see what module
+ files have been installed. Here is sample output of those programs,
showing lots of installed packages:

.. code-block:: console
@@ -51,12 +51,7 @@ showing lots of installed packages:
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj

The names should look familiar, as they resemble the output from ``spack find``.
- You *can* use the modules here directly. For example, you could type either of these commands
- to load the ``cmake`` module:
-
- .. code-block:: console
-
- $ use cmake-3.7.2-gcc-6.3.0-fowuuby
+ For example, you could type the following command to load the ``cmake`` module:

.. code-block:: console

@@ -93,9 +88,9 @@ the different file formats that can be generated by Spack:
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** |
+=============================+====================+===============================+==============================================+======================+
- | **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/LMod |
+ | **Tcl - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
- | **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | LMod |
+ | **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+

@@ -396,13 +391,13 @@ name and version for all packages that depend on mpi.

When specifying module names by projection for Lmod modules, we
recommend NOT including names of dependencies (e.g., MPI, compilers)
- that are already in the LMod hierarchy.
+ that are already in the Lmod hierarchy.


.. note::
-    TCL modules
-    TCL modules also allow for explicit conflicts between modulefiles.
+    Tcl modules
+    Tcl modules also allow for explicit conflicts between modulefiles.

.. code-block:: yaml

@@ -426,9 +421,9 @@ that are already in the LMod hierarchy.

.. note::
-    LMod hierarchical module files
+    Lmod hierarchical module files
     When ``lmod`` is activated Spack will generate a set of hierarchical lua module
-    files that are understood by LMod. The hierarchy will always contain the
+    files that are understood by Lmod. The hierarchy will always contain the
     two layers ``Core`` / ``Compiler`` but can be further extended to
     any of the virtual dependencies present in Spack. A case that could be useful in
     practice is for instance:
@@ -450,7 +445,7 @@ that are already in the LMod hierarchy.

that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched
independently. This allows a site to build the same libraries or applications against different
- implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the
+ implementations of ``mpi`` and ``lapack``, and let Lmod switch safely from one to the
other.

All packages built with a compiler in ``core_compilers`` and all
@@ -460,12 +455,12 @@ that are already in the LMod hierarchy.
.. warning::
   Consistency of Core packages
   The user is responsible for maintining consistency among core packages, as ``core_specs``
-   bypasses the hierarchy that allows LMod to safely switch between coherent software stacks.
+   bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks.

.. warning::
   Deep hierarchies and ``lmod spider``
   For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
-   See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
+   See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_.

""""""""""""""""""""""
Select default modules
@@ -534,7 +529,7 @@ installed to ``/spack/prefix/foo``, if ``foo`` installs executables to
update ``MANPATH``.

The default list of environment variables in this config section
- inludes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
+ includes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
and ``CMAKE_PREFIX_PATH``, as well as ``DYLD_FALLBACK_LIBRARY_PATH``
on macOS. On Linux however, the corresponding ``LD_LIBRARY_PATH``
variable is *not* set, because it affects the behavior of
@@ -634,8 +629,9 @@ by its dependency; when the dependency is autoloaded, the executable will be in
PATH. Similarly for scripting languages such as Python, packages and their dependencies
have to be loaded together.

- Autoloading is enabled by default for LMod, as it has great builtin support for through
- the ``depends_on`` function. For Environment Modules it is disabled by default.
+ Autoloading is enabled by default for Lmod and Environment Modules. The former
+ has builtin support for through the ``depends_on`` function. The latter uses
+ ``module load`` statement to load and track dependencies.

Autoloading can also be enabled conditionally:

@@ -655,12 +651,14 @@ The allowed values for the ``autoload`` statement are either ``none``,
``direct`` or ``all``.

.. note::
-    TCL prerequisites
+    Tcl prerequisites
     In the ``tcl`` section of the configuration file it is possible to use
     the ``prerequisites`` directive that accepts the same values as
     ``autoload``. It will produce module files that have a ``prereq``
-    statement, which can be used to autoload dependencies in some versions
-    of Environment Modules.
+    statement, which autoloads dependencies on Environment Modules when its
+    ``auto_handling`` configuration option is enabled. If Environment Modules
+    is installed with Spack, ``auto_handling`` is enabled by default starting
+    version 4.2. Otherwise it is enabled by default since version 5.0.

------------------------
Maintaining Module Files
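The "Maintaining Module Files" section introduced above covers regenerating these files; in practice that is usually done with the ``spack module`` commands, roughly as follows (a sketch; consult ``spack module --help`` for the exact flags in your Spack version):

.. code-block:: console

   $ spack module tcl refresh --delete-tree
   $ spack module lmod refresh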
File diff suppressed because it is too large.
@@ -9,27 +9,32 @@
|
||||
CI Pipelines
|
||||
============
|
||||
|
||||
Spack provides commands that support generating and running automated build
|
||||
pipelines designed for Gitlab CI. At the highest level it works like this:
|
||||
provide a spack environment describing the set of packages you care about,
|
||||
and include within that environment file a description of how those packages
|
||||
should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml``
|
||||
file containing job descriptions for all your packages that can be run by a
|
||||
properly configured Gitlab CI instance. When run, the generated pipeline will
|
||||
build and deploy binaries, and it can optionally report to a CDash instance
|
||||
Spack provides commands that support generating and running automated build pipelines in CI instances. At the highest
|
||||
level it works like this: provide a spack environment describing the set of packages you care about, and include a
|
||||
description of how those packages should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml``
|
||||
file containing job descriptions for all your packages that can be run by a properly configured CI instance. When
|
||||
run, the generated pipeline will build and deploy binaries, and it can optionally report to a CDash instance
|
||||
regarding the health of the builds as they evolve over time.
|
||||
|
||||
------------------------------
|
||||
Getting started with pipelines
|
||||
------------------------------
|
||||
|
||||
It is fairly straightforward to get started with automated build pipelines. At
|
||||
a minimum, you'll need to set up a Gitlab instance (more about Gitlab CI
|
||||
`here <https://about.gitlab.com/product/continuous-integration/>`_) and configure
|
||||
at least one `runner <https://docs.gitlab.com/runner/>`_. Then the basic steps
|
||||
for setting up a build pipeline are as follows:
|
||||
To get started with automated build pipelines a Gitlab instance with version ``>= 12.9``
|
||||
(more about Gitlab CI `here <https://about.gitlab.com/product/continuous-integration/>`_)
|
||||
with at least one `runner <https://docs.gitlab.com/runner/>`_ configured is required. This
|
||||
can be done quickly by setting up a local Gitlab instance.
|
||||
|
||||
#. Create a repository on your gitlab instance
|
||||
It is possible to set up pipelines on gitlab.com, but the builds there are limited to
|
||||
60 minutes and generic hardware. It is possible to
|
||||
`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
|
||||
Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
|
||||
or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
|
||||
topics are outside the scope of this document.
|
||||
|
||||
After setting up a Gitlab instance for running CI, the basic steps for setting up a build pipeline are as follows:
|
||||
|
||||
#. Create a repository in the Gitlab instance with CI and a runner enabled.
|
||||
#. Add a ``spack.yaml`` at the root containing your pipeline environment
|
||||
#. Add a ``.gitlab-ci.yml`` at the root containing two jobs (one to generate
|
||||
the pipeline dynamically, and one to run the generated jobs).
|
||||
@@ -40,13 +45,6 @@ See the :ref:`functional_example` section for a minimal working example. See al
|
||||
the :ref:`custom_Workflow` section for a link to an example of a custom workflow
|
||||
based on spack pipelines.
|
||||
|
||||
While it is possible to set up pipelines on gitlab.com, as illustrated above, the
|
||||
builds there are limited to 60 minutes and generic hardware. It is also possible to
|
||||
`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
|
||||
Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
|
||||
or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
|
||||
topics are outside the scope of this document.
|
||||
|
||||
Spack's pipelines are now making use of the
|
||||
`trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run
|
||||
dynamically generated
|
||||
@@ -132,29 +130,35 @@ And here's the spack environment built by the pipeline represented as a

   mirrors: { "mirror": "s3://spack-public/mirror" }

   gitlab-ci:
     before_script:
       - git clone ${SPACK_REPO}
       - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
       - . "./spack/share/spack/setup-env.sh"
     script:
       - pushd ${SPACK_CONCRETE_ENV_DIR} && spack env activate --without-view . && popd
       - spack -d ci rebuild
     mappings:
       - match: ["os=ubuntu18.04"]
         runner-attributes:
           image:
             name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
             entrypoint: [""]
           tags:
             - docker
   ci:
     enable-artifacts-buildcache: True
     rebuild-index: False
     pipeline-gen:
     - any-job:
         before_script:
           - git clone ${SPACK_REPO}
           - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
           - . "./spack/share/spack/setup-env.sh"
     - build-job:
         tags: [docker]
         image:
           name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
           entrypoint: [""]

The elements of this file important to spack ci pipelines are described in more
detail below, but there are a couple of things to note about the above working
example:

.. note::
   There is no ``script`` attribute specified here. The reason for this is that
   Spack CI will automatically generate reasonable default scripts. More
   detail on what is in these scripts can be found below.

   Also notice the ``before_script`` section. It is required when using any of the
   default scripts to source the ``setup-env.sh`` script in order to inform
   the default scripts where to find the ``spack`` executable.

Normally ``enable-artifacts-buildcache`` is not recommended in production as it
results in large binary artifacts getting transferred back and forth between
gitlab and the runners. But in this example on gitlab.com where there is no
@@ -174,7 +178,7 @@ during subsequent pipeline runs.
With the addition of reproducible builds (#22887) a previously working
pipeline will require some changes:

* In the build jobs (``runner-attributes``), the environment location changed.
* In the build-jobs, the environment location changed.
  This will typically show as a ``KeyError`` in the failing job. Be sure to
  point to ``${SPACK_CONCRETE_ENV_DIR}``.
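For instance, a ``build-job`` script updated for the new environment location could be
sketched as follows (this simply mirrors the default script described later in this
document, with the working directory taken from ``${SPACK_CONCRETE_ENV_DIR}``):

.. code-block:: yaml

   ci:
     pipeline-gen:
     - build-job:
         script:
           - cd ${SPACK_CONCRETE_ENV_DIR}
           - spack env activate --without-view .
           - spack -d ci rebuild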
@@ -196,9 +200,9 @@ ci pipelines. These commands are covered in more detail in this section.

.. _cmd-spack-ci:

^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^
``spack ci``
^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^

Super-command for functionality related to generating pipelines and executing
pipeline jobs.
@@ -227,7 +231,7 @@ Using ``--prune-dag`` or ``--no-prune-dag`` configures whether or not jobs are
generated for specs that are already up to date on the mirror. If enabling
DAG pruning using ``--prune-dag``, more information may be required in your
``spack.yaml`` file, see the :ref:`noop_jobs` section below regarding
``service-job-attributes``.
``noop-job``.

The optional ``--check-index-only`` argument can be used to speed up pipeline
generation by telling spack to consider only remote buildcache indices when
@@ -263,11 +267,11 @@ generated by jobs in the pipeline.

.. _cmd-spack-ci-rebuild:

^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^
``spack ci rebuild``
^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^

The purpose of ``spack ci rebuild`` is straightforward: take its assigned
The purpose of ``spack ci rebuild`` is to take an assigned
spec and ensure a binary of a successful build exists on the target mirror.
If the binary does not already exist, it is built from source and pushed
to the mirror. The associated stand-alone tests are optionally run against
@@ -280,7 +284,7 @@ directory. The script is run in a job to install the spec from source. The
resulting binary package is pushed to the mirror. If ``cdash`` is configured
for the environment, then the build results will be uploaded to the site.

Environment variables and values in the ``gitlab-ci`` section of the
Environment variables and values in the ``ci::pipeline-gen`` section of the
``spack.yaml`` environment file provide inputs to this process. The
two main sources of environment variables are variables written into
``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
@@ -298,21 +302,23 @@ A snippet from an example ``spack.yaml`` file illustrating use of this
option *and* specification of a package with broken tests is given below.
The inclusion of a spec for building ``gptune`` is not shown here. Note
that ``--tests`` is passed to ``spack ci rebuild`` as part of the
``gitlab-ci`` script.
``build-job`` script.

.. code-block:: yaml

   gitlab-ci:
     script:
       - . "./share/spack/setup-env.sh"
       - spack --version
       - cd ${SPACK_CONCRETE_ENV_DIR}
       - spack env activate --without-view .
       - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
       - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
       - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
       - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
       - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
   ci:
     pipeline-gen:
     - build-job:
         script:
           - . "./share/spack/setup-env.sh"
           - spack --version
           - cd ${SPACK_CONCRETE_ENV_DIR}
           - spack env activate --without-view .
           - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
           - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
           - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
           - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
           - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

     broken-tests-packages:
     - gptune
@@ -354,113 +360,31 @@ arguments you can pass to ``spack ci reproduce-build`` in order to reproduce
a particular build locally.

------------------------------------
A pipeline-enabled spack environment
Job Types
------------------------------------

Here's an example of a spack environment file that has been enhanced with
sections describing a build pipeline:
^^^^^^^^^^^^^^^
Rebuild (build)
^^^^^^^^^^^^^^^

.. code-block:: yaml
Rebuild jobs, denoted as ``build-job``'s in the ``pipeline-gen`` list, are jobs
associated with concrete specs that have been marked for rebuild. By default a simple
script for doing the rebuild is generated, but it may be modified as needed.

   spack:
     definitions:
     - pkgs:
       - readline@7.0
     - compilers:
       - '%gcc@5.5.0'
     - oses:
       - os=ubuntu18.04
       - os=centos7
     specs:
     - matrix:
       - [$pkgs]
       - [$compilers]
       - [$oses]
     mirrors:
       cloud_gitlab: https://mirror.spack.io
     gitlab-ci:
       mappings:
       - match:
         - os=ubuntu18.04
         runner-attributes:
           tags:
           - spack-kube
           image: spack/ubuntu-bionic
       - match:
         - os=centos7
         runner-attributes:
           tags:
           - spack-kube
           image: spack/centos7
     cdash:
       build-group: Release Testing
       url: https://cdash.spack.io
       project: Spack
       site: Spack AWS Gitlab Instance
The default script does three main steps: change directories to the pipeline's concrete
environment, activate the concrete environment, and run the ``spack ci rebuild`` command:

Hopefully, the ``definitions``, ``specs``, ``mirrors``, etc. sections are already
familiar, as they are part of spack :ref:`environments`. So let's take a more
in-depth look at some of the pipeline-related sections in that environment file
that might not be as familiar.
.. code-block:: bash

The ``gitlab-ci`` section is used to configure how the pipeline workload should be
generated, mainly how the jobs for building specs should be assigned to the
configured runners on your instance. Each entry within the list of ``mappings``
corresponds to a known gitlab runner, where the ``match`` section is used
in assigning a release spec to one of the runners, and the ``runner-attributes``
section is used to configure the spec/job for that particular runner.

Both the top-level ``gitlab-ci`` section as well as each ``runner-attributes``
section can also contain the following keys: ``image``, ``tags``, ``variables``,
``before_script``, ``script``, and ``after_script``. If any of these keys are
provided at the ``gitlab-ci`` level, they will be used as the defaults for any
``runner-attributes``, unless they are overridden in those sections. Specifying
any of these keys at the ``runner-attributes`` level generally overrides the
keys specified at the higher level, with a couple exceptions. Any ``variables``
specified at both levels result in those dictionaries getting merged in the
resulting generated job, and any duplicate variable names get assigned the value
provided in the specific ``runner-attributes``. If ``tags`` are specified both
at the ``gitlab-ci`` level as well as the ``runner-attributes`` level, then the
lists of tags are combined, and any duplicates are removed.

See the section below on using a custom spack for an example of how these keys
could be used.

There are other pipeline options you can configure within the ``gitlab-ci`` section
as well.

The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs`` (this
section is described in more detail below). The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).

The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.

The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.

Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/gitlab_ci.py>`_
for the gitlab-ci section of the spack environment file, to see precisely what
syntax is allowed there.
   cd ${concrete_environment_dir}
   spack env activate --without-view .
   spack ci rebuild

.. _rebuild_index:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Note about rebuilding buildcache index
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^
Update Index (reindex)
^^^^^^^^^^^^^^^^^^^^^^

By default, while a pipeline job may rebuild a package, create a buildcache
entry, and push it to the mirror, it does not automatically re-generate the
@@ -475,21 +399,44 @@ not correctly reflect the mirror's contents at the end of a pipeline.
To make sure the buildcache index is up to date at the end of your pipeline,
spack generates a job to update the buildcache index of the target mirror
at the end of each pipeline by default. You can disable this behavior by
adding ``rebuild-index: False`` inside the ``gitlab-ci`` section of your
spack environment. Spack will assign the job any runner attributes found
on the ``service-job-attributes``, if you have provided that in your
``spack.yaml``.
adding ``rebuild-index: False`` inside the ``ci`` section of your
spack environment.

Reindex jobs do not allow modifying the ``script`` attribute since it is automatically
generated using the target mirror listed in the ``mirrors::mirror`` configuration.

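For example, disabling the index update job could be sketched as:

.. code-block:: yaml

   spack:
     ci:
       rebuild-index: False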
^^^^^^^^^^^^^^^^^
Signing (signing)
^^^^^^^^^^^^^^^^^

This job is run after all of the rebuild jobs are completed and is intended to be used
to sign the package binaries built by a protected CI run. Signing jobs are generated
only if a signing job ``script`` is specified and the spack CI job type is protected.
Note that if an ``any-job`` section contains a script, this will not implicitly create a
``signing`` job; a signing job may only exist if it is explicitly specified in the
configuration with a ``script`` attribute. Specifying a signing job without a script
does not create a signing job, and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.

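As an illustration only (the exact signing tooling is site specific and is not prescribed
by this document), a signing job could be enabled with a sketch along these lines, where
``sign-binaries.sh`` and ``$MIRROR_URL`` are hypothetical placeholders:

.. code-block:: yaml

   ci:
     pipeline-gen:
     - signing-job:
         image: ghcr.io/example/notary:latest      # hypothetical image
         script:
           - ./sign-binaries.sh "$MIRROR_URL"      # hypothetical helper and variable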
^^^^^^^^^^^^^^^^^
Cleanup (cleanup)
^^^^^^^^^^^^^^^^^

When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
script, but do expect that the spack command is in the path and require a
``before_script`` to be specified that sources the ``setup-env.sh`` script.

.. _noop_jobs:

^^^^^^^^^^^^^^^^^^^^^^^
Note about "no-op" jobs
^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^
No Op (noop)
^^^^^^^^^^^^

If no specs in an environment need to be rebuilt during a given pipeline run
(meaning all are already up to date on the mirror), a single successful job
(a NO-OP) is still generated to avoid an empty pipeline (which GitLab
considers to be an error). An optional ``service-job-attributes`` section
considers to be an error). The ``noop-job*`` sections
can be added to your ``spack.yaml`` where you can provide ``tags`` and
``image`` or ``variables`` for the generated NO-OP job. This section also
supports providing ``before_script``, ``script``, and ``after_script``, in
@@ -499,51 +446,100 @@ Following is an example of this section added to a ``spack.yaml``:

.. code-block:: yaml

   spack:
     specs:
     - openmpi
     mirrors:
       cloud_gitlab: https://mirror.spack.io
     gitlab-ci:
       mappings:
       - match:
         - os=centos8
         runner-attributes:
           tags:
           - custom
           - tag
           image: spack/centos7
       service-job-attributes:
         tags: ['custom', 'tag']
         image:
           name: 'some.image.registry/custom-image:latest'
           entrypoint: ['/bin/bash']
         script:
         - echo "Custom message in a custom script"
   spack:
     ci:
       pipeline-gen:
       - noop-job:
           tags: ['custom', 'tag']
           image:
             name: 'some.image.registry/custom-image:latest'
             entrypoint: ['/bin/bash']
           script::
           - echo "Custom message in a custom script"

The example above illustrates how you can provide the attributes used to run
the NO-OP job in the case of an empty pipeline. The only field for the NO-OP
job that might be generated for you is ``script``, but that will only happen
if you do not provide one yourself.
if you do not provide one yourself. Notice in this example the ``script``
uses the ``::`` notation to prescribe override behavior. Without this, the
``echo`` command would have been prepended to the automatically generated script
rather than replacing it.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Assignment of specs to runners
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
------------------------------------
ci.yaml
------------------------------------

The ``mappings`` section corresponds to a list of runners, and during assignment
of specs to runners, the list is traversed in order looking for matches; the
first runner that matches a release spec is assigned to build that spec. The
``match`` section within each runner mapping section is a list of specs, and
if any of those specs match the release spec (the ``spec.satisfies()`` method
is used), then that runner is considered a match.
Here's an example of a spack configuration file describing a build pipeline:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration of specs/jobs for a runner
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. code-block:: yaml

Once a runner has been chosen to build a release spec, the ``runner-attributes``
section provides information determining details of the job in the context of
the runner. The ``runner-attributes`` section must have a ``tags`` key, which
   ci:
     target: gitlab

     rebuild_index: True

     broken-specs-url: https://broken.specs.url

     broken-tests-packages:
     - gptune

     pipeline-gen:
     - submapping:
       - match:
         - os=ubuntu18.04
         build-job:
           tags:
           - spack-kube
           image: spack/ubuntu-bionic
       - match:
         - os=centos7
         build-job:
           tags:
           - spack-kube
           image: spack/centos7

   cdash:
     build-group: Release Testing
     url: https://cdash.spack.io
     project: Spack
     site: Spack AWS Gitlab Instance

The ``ci`` config section is used to configure how the pipeline workload should be
generated, mainly how the jobs for building specs should be assigned to the
configured runners on your instance. The main section for configuring pipelines
is ``pipeline-gen``, which is a list of job attribute sections that are merged,
using the same rules as Spack configs (:ref:`config-scope-precedence`), from the bottom up.
Sections are applied in an order consistent with how spack orders scope precedence when merging lists.
There are two main section types, ``<type>-job`` sections and ``submapping``
sections.


^^^^^^^^^^^^^^^^^^^^^^
Job Attribute Sections
^^^^^^^^^^^^^^^^^^^^^^

Each type of job may have attributes added or removed via sections in the ``pipeline-gen``
list. Job type specific attributes may be specified using the keys ``<type>-job`` to
add attributes to all jobs of type ``<type>`` or ``<type>-job-remove`` to remove attributes
of type ``<type>``. Each section may only contain one type of job attribute specification,
i.e. ``build-job`` and ``noop-job`` may not coexist, but ``build-job`` and ``build-job-remove``
may (a short sketch is given below).

.. note::
   The ``*-remove`` specifications are applied before the additive attribute specification.
   For example, in the case where both ``build-job`` and ``build-job-remove`` are listed in
   the same ``pipeline-gen`` section, the value will still exist in the merged build-job after
   applying the section.

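For illustration, a ``pipeline-gen`` entry combining the two might look like the following
sketch (the tag and image values here are made up for the example):

.. code-block:: yaml

   ci:
     pipeline-gen:
     - build-job-remove:
         tags: [docker]                            # remove an inherited tag
       build-job:
         image: ghcr.io/example/builder:latest     # hypothetical replacement image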
All of the attributes specified are forwarded to the generated CI jobs; however, special
treatment is applied to the attributes ``tags``, ``image``, ``variables``, ``script``,
``before_script``, and ``after_script`` as they are components recognized explicitly by the
Spack CI generator. For the ``tags`` attribute, Spack will remove reserved tags
(:ref:`reserved_tags`) from all jobs specified in the config. In some cases, such as for
``signing`` jobs, reserved tags will be added back based on the type of CI that is being run.

Once a runner has been chosen to build a release spec, the ``build-job*``
sections provide information determining details of the job in the context of
the runner. At least one of the ``build-job*`` sections must contain a ``tags`` key, which
is a list containing at least one tag used to select the runner from among the
runners known to the gitlab instance. For Docker executor type runners, the
``image`` key is used to specify the Docker image used to build the release spec
@@ -554,7 +550,7 @@ information on to the runner that it needs to do its work (e.g. scheduler
parameters, etc.). Any ``variables`` provided here will be added, verbatim, to
each job.

The ``runner-attributes`` section also allows users to supply custom ``script``,
The ``build-job`` section also allows users to supply custom ``script``,
``before_script``, and ``after_script`` sections to be applied to every job
scheduled on that runner. This allows users to do any custom preparation or
cleanup tasks that fit their particular workflow, as well as completely
@@ -565,46 +561,45 @@ environment directory is located within your ``--artifacts_root`` (or if not
provided, within your ``$CI_PROJECT_DIR``), activates that environment for
you, and invokes ``spack ci rebuild``.

.. _staging_algorithm:
Sections that specify scripts (``script``, ``before_script``, ``after_script``) are all
read as lists of commands or lists of lists of commands. It is recommended to write scripts
as lists of lists if scripts will be composed via merging. The default behavior of merging
lists will remove duplicate commands and potentially apply unwanted reordering, whereas
merging lists of lists will preserve the local ordering and never removes duplicate
commands. When writing commands to the CI target script, all lists are expanded and
flattened into a single list.

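As a sketch of that convention (the commands are placeholders), a script written as a list
of lists keeps its local ordering when composed with other sections:

.. code-block:: yaml

   ci:
     pipeline-gen:
     - build-job:
         before_script:
         - - export PATH=/custom/tools/bin:$PATH   # grouped commands stay together
           - custom-tool --prepare                 # and keep their relative order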
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of ``.gitlab-ci.yml`` generation algorithm
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^
Submapping Sections
^^^^^^^^^^^^^^^^^^^

All specs yielded by the matrix (or all the specs in the environment) have their
dependencies computed, and the entire resulting set of specs are staged together
before being run through the ``gitlab-ci/mappings`` entries, where each staged
spec is assigned a runner. "Staging" is the name given to the process of
figuring out in what order the specs should be built, taking into consideration
Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed
to have completed already). As a runner is determined for a job, the information
in the ``runner-attributes`` is used to populate various parts of the job
description that will be used by Gitlab CI. Once all the jobs have been assigned
a runner, the ``.gitlab-ci.yml`` is written to disk.
A special case of attribute specification is the ``submapping`` section which may be used
to apply job attributes to build jobs based on the package spec associated with the rebuild
job. Submapping is specified as a list of spec ``match`` lists associated with
``build-job``/``build-job-remove`` sections. There are two options for ``match_behavior``:
either ``first`` or ``merge`` may be specified. In either case, the ``submapping`` list is
processed from the bottom up, and then each ``match`` list is searched for a string that
satisfies the check ``spec.satisfies({match_item})`` for each concrete spec.

The short example provided above would result in the ``readline``, ``ncurses``,
and ``pkgconf`` packages getting staged and built on the runner chosen by the
``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the
presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
``spack ci generate`` command would result in all of the jobs being put in a
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
In the case of ``match_behavior: first``, the first ``match`` section in the list of
``submappings`` that contains a string that satisfies the spec will apply its
``build-job*`` attributes to the rebuild job associated with that spec. This is the
default behavior and will be used if no ``match_behavior`` is specified.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional compiler bootstrapping
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In the case of ``match_behavior: merge``, all of the ``match`` sections in the list of
``submappings`` that contain a string that satisfies the spec will have the associated
``build-job*`` attributes applied to the rebuild job associated with that spec. Again,
the attributes will be merged starting from the bottom match going up to the top match.

Spack pipelines also have support for bootstrapping compilers on systems that
may not already have the desired compilers installed. The idea here is that
you can specify a list of things to bootstrap in your ``definitions``, and
spack will guarantee those will be installed in a phase of the pipeline before
your release specs, so that you can rely on those packages being available in
the binary mirror when you need them later on in the pipeline. At the moment
In the case that no match is found in a submapping section, no additional attributes will be applied.

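Putting these pieces together, a submapping entry with an explicit ``match_behavior`` might
look like the following sketch (the tags are placeholders, and ``match_behavior`` is assumed
to sit alongside ``submapping`` in the same ``pipeline-gen`` entry):

.. code-block:: yaml

   ci:
     pipeline-gen:
     - match_behavior: first
       submapping:
       - match:
         - os=ubuntu18.04
         build-job:
           tags: [x86_64-runner]                   # placeholder tag
           image: spack/ubuntu-bionic
       - match:
         - target=aarch64
         build-job:
           tags: [aarch64-runner]                  # placeholder tag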
^^^^^^^^^^^^^
Bootstrapping
^^^^^^^^^^^^^


The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs``. At the moment
the only viable use-case for bootstrapping is to install compilers.

Here's an example of what bootstrapping some compilers might look like:
@@ -680,6 +675,86 @@ environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use.

^^^^^^^^^^^^^^^^^^^
Pipeline Buildcache
^^^^^^^^^^^^^^^^^^^

The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).

^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^

The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.

^^^^^
CDash
^^^^^

The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.

Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/ci.py>`_
for the ``ci`` section of the spack environment file, to see precisely what
syntax is allowed there.

.. _reserved_tags:

^^^^^^^^^^^^^
Reserved Tags
^^^^^^^^^^^^^

Spack has a subset of tags (``public``, ``protected``, and ``notary``) that it reserves
for classifying runners that may require special permissions or access. The tags
``public`` and ``protected`` are used to distinguish between runners that use public
permissions and runners with protected permissions. The ``notary`` tag is a special tag
that is used to indicate runners that have access to the highly protected information
used for signing binaries using the ``signing`` job.

.. _staging_algorithm:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of ``.gitlab-ci.yml`` generation algorithm
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

All specs yielded by the matrix (or all the specs in the environment) have their
dependencies computed, and the entire resulting set of specs are staged together
before being run through the ``ci/pipeline-gen`` entries, where each staged
spec is assigned a runner. "Staging" is the name given to the process of
figuring out in what order the specs should be built, taking into consideration
Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed
to have completed already). As a runner is determined for a job, the information
in the merged ``any-job*`` and ``build-job*`` sections is used to populate various parts of the job
description that will be used by the target CI pipelines. Once all the jobs have been assigned
a runner, the ``.gitlab-ci.yml`` is written to disk.

The short example provided above would result in the ``readline``, ``ncurses``,
and ``pkgconf`` packages getting staged and built on the runner chosen by the
``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the
presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
``spack ci generate`` command would result in all of the jobs being put in a
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).

-------------------------------------
Using a custom spack in your pipeline
-------------------------------------
@@ -726,23 +801,21 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs

   spack:
     ...
     gitlab-ci:
       mappings:
       - match:
         - os=ubuntu18.04
         runner-attributes:
           tags:
           - spack-kube
           image: spack/ubuntu-bionic
           before_script:
           - git clone ${SPACK_REPO}
           - pushd spack && git checkout ${SPACK_REF} && popd
           - . "./spack/share/spack/setup-env.sh"
           script:
           - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
           - spack -d ci rebuild
           after_script:
           - rm -rf ./spack
     ci:
       pipeline-gen:
       - build-job:
           tags:
           - spack-kube
           image: spack/ubuntu-bionic
           before_script:
           - git clone ${SPACK_REPO}
           - pushd spack && git checkout ${SPACK_REF} && popd
           - . "./spack/share/spack/setup-env.sh"
           script:
           - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
           - spack -d ci rebuild
           after_script:
           - rm -rf ./spack

Now all of the generated rebuild jobs will use the same shell script to clone
spack before running their actual workload.
@@ -831,3 +904,4 @@ verify binary packages (when installing or creating buildcaches). You could
also have already trusted a key spack knows about, or if no key is present anywhere,
spack will install specs using ``--no-check-signature`` and create buildcaches
using ``-u`` (for unsigned binaries).

93
lib/spack/env/cc
vendored
@@ -427,6 +427,48 @@ isystem_include_dirs_list=""
|
||||
libs_list=""
|
||||
other_args_list=""
|
||||
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
|
||||
parse_Wl() {
|
||||
# drop -Wl
|
||||
shift
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
rp="$1"
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
rp=""
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
rp="${1#-rpath=}"
|
||||
;;
|
||||
--rpath=*)
|
||||
rp="${1#--rpath=}"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
append other_args_list "-Wl,$1"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
if [ -n "$rp" ]; then
|
||||
if system_dir "$rp"; then
|
||||
append system_rpath_dirs_list "$rp"
|
||||
else
|
||||
append rpath_dirs_list "$rp"
|
||||
fi
|
||||
fi
|
||||
shift
|
||||
done
|
||||
# By lack of local variables, always set this to empty string.
|
||||
rp=""
|
||||
}
|
||||
|
||||
|
||||
while [ $# -ne 0 ]; do
|
||||
|
||||
@@ -526,54 +568,9 @@ while [ $# -ne 0 ]; do
|
||||
append other_args_list "-l$arg"
|
||||
;;
|
||||
-Wl,*)
|
||||
arg="${1#-Wl,}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
case "$arg" in
|
||||
-rpath=*) rp="${arg#-rpath=}" ;;
|
||||
--rpath=*) rp="${arg#--rpath=}" ;;
|
||||
-rpath,*) rp="${arg#-rpath,}" ;;
|
||||
--rpath,*) rp="${arg#--rpath,}" ;;
|
||||
-rpath|--rpath)
|
||||
shift; arg="$1"
|
||||
case "$arg" in
|
||||
-Wl,*)
|
||||
rp="${arg#-Wl,}"
|
||||
;;
|
||||
*)
|
||||
die "-Wl,-rpath was not followed by -Wl,*"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
"$dtags_to_strip")
|
||||
: # We want to remove explicitly this flag
|
||||
;;
|
||||
*)
|
||||
append other_args_list "-Wl,$arg"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
-Xlinker,*)
|
||||
arg="${1#-Xlinker,}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
|
||||
case "$arg" in
|
||||
-rpath=*) rp="${arg#-rpath=}" ;;
|
||||
--rpath=*) rp="${arg#--rpath=}" ;;
|
||||
-rpath|--rpath)
|
||||
shift; arg="$1"
|
||||
case "$arg" in
|
||||
-Xlinker,*)
|
||||
rp="${arg#-Xlinker,}"
|
||||
;;
|
||||
*)
|
||||
die "-Xlinker,-rpath was not followed by -Xlinker,*"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
*)
|
||||
append other_args_list "-Xlinker,$arg"
|
||||
;;
|
||||
esac
|
||||
IFS=,
|
||||
parse_Wl $1
|
||||
unset IFS
|
||||
;;
|
||||
-Xlinker)
|
||||
if [ "$2" = "-rpath" ]; then
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
import sys
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from sys import platform as _platform
|
||||
from typing import Callable, List, Match, Optional, Tuple, Union
|
||||
|
||||
from llnl.util import tty
|
||||
@@ -26,9 +25,7 @@
|
||||
from spack.util.executable import Executable, which
|
||||
from spack.util.path import path_to_os_path, system_path_filter
|
||||
|
||||
is_windows = _platform == "win32"
|
||||
|
||||
if not is_windows:
|
||||
if sys.platform != "win32":
|
||||
import grp
|
||||
import pwd
|
||||
else:
|
||||
@@ -84,9 +81,77 @@
|
||||
"visit_directory_tree",
|
||||
]
|
||||
|
||||
if sys.version_info < (3, 7, 4):
|
||||
# monkeypatch shutil.copystat to fix PermissionError when copying read-only
|
||||
# files on Lustre when using Python < 3.7.4
|
||||
|
||||
def copystat(src, dst, follow_symlinks=True):
|
||||
"""Copy file metadata
|
||||
Copy the permission bits, last access time, last modification time, and
|
||||
flags from `src` to `dst`. On Linux, copystat() also copies the "extended
|
||||
attributes" where possible. The file contents, owner, and group are
|
||||
unaffected. `src` and `dst` are path names given as strings.
|
||||
If the optional flag `follow_symlinks` is not set, symlinks aren't
|
||||
followed if and only if both `src` and `dst` are symlinks.
|
||||
"""
|
||||
|
||||
def _nop(args, ns=None, follow_symlinks=None):
|
||||
pass
|
||||
|
||||
# follow symlinks (aka don't not follow symlinks)
|
||||
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
|
||||
if follow:
|
||||
# use the real function if it exists
|
||||
def lookup(name):
|
||||
return getattr(os, name, _nop)
|
||||
|
||||
else:
|
||||
# use the real function only if it exists
|
||||
# *and* it supports follow_symlinks
|
||||
def lookup(name):
|
||||
fn = getattr(os, name, _nop)
|
||||
if sys.version_info >= (3, 3):
|
||||
if fn in os.supports_follow_symlinks: # novermin
|
||||
return fn
|
||||
return _nop
|
||||
|
||||
st = lookup("stat")(src, follow_symlinks=follow)
|
||||
mode = stat.S_IMODE(st.st_mode)
|
||||
lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns), follow_symlinks=follow)
|
||||
|
||||
# We must copy extended attributes before the file is (potentially)
|
||||
# chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
|
||||
shutil._copyxattr(src, dst, follow_symlinks=follow)
|
||||
|
||||
try:
|
||||
lookup("chmod")(dst, mode, follow_symlinks=follow)
|
||||
except NotImplementedError:
|
||||
# if we got a NotImplementedError, it's because
|
||||
# * follow_symlinks=False,
|
||||
# * lchown() is unavailable, and
|
||||
# * either
|
||||
# * fchownat() is unavailable or
|
||||
# * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
|
||||
# (it returned ENOSUP.)
|
||||
# therefore we're out of options--we simply cannot chown the
|
||||
# symlink. give up, suppress the error.
|
||||
# (which is what shutil always did in this circumstance.)
|
||||
pass
|
||||
if hasattr(st, "st_flags"):
|
||||
try:
|
||||
lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
|
||||
except OSError as why:
|
||||
for err in "EOPNOTSUPP", "ENOTSUP":
|
||||
if hasattr(errno, err) and why.errno == getattr(errno, err):
|
||||
break
|
||||
else:
|
||||
raise
|
||||
|
||||
shutil.copystat = copystat
|
||||
|
||||
|
||||
def getuid():
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
import ctypes
|
||||
|
||||
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
|
||||
@@ -99,7 +164,7 @@ def getuid():
|
||||
@system_path_filter
|
||||
def rename(src, dst):
|
||||
# On Windows, os.rename will fail if the destination file already exists
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
# Windows path existence checks will sometimes fail on junctions/links/symlinks
|
||||
# so check for that case
|
||||
if os.path.exists(dst) or os.path.islink(dst):
|
||||
@@ -128,7 +193,7 @@ def _get_mime_type():
|
||||
"""Generate method to call `file` system command to aquire mime type
|
||||
for a specified path
|
||||
"""
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
# -h option (no-dereference) does not exist in Windows
|
||||
return file_command("-b", "--mime-type")
|
||||
else:
|
||||
@@ -268,7 +333,6 @@ def groupid_to_group(x):
|
||||
regex = re.escape(regex)
|
||||
filenames = path_to_os_path(*filenames)
|
||||
for filename in filenames:
|
||||
|
||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
||||
tty.debug(msg.format(filename, regex))
|
||||
|
||||
@@ -484,7 +548,7 @@ def get_owner_uid(path, err_msg=None):
|
||||
else:
|
||||
p_stat = os.stat(path)
|
||||
|
||||
if _platform != "win32":
|
||||
if sys.platform != "win32":
|
||||
owner_uid = p_stat.st_uid
|
||||
else:
|
||||
sid = win32security.GetFileSecurity(
|
||||
@@ -517,7 +581,7 @@ def group_ids(uid=None):
|
||||
Returns:
|
||||
(list of int): gids of groups the user is a member of
|
||||
"""
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
tty.warn("Function is not supported on Windows")
|
||||
return []
|
||||
|
||||
@@ -537,7 +601,7 @@ def group_ids(uid=None):
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp(path, group, follow_symlinks=True):
|
||||
"""Implement the bash chgrp function on a single path"""
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
raise OSError("Function 'chgrp' is not supported on Windows")
|
||||
|
||||
if isinstance(group, str):
|
||||
@@ -1064,7 +1128,7 @@ def open_if_filename(str_or_file, mode="r"):
|
||||
@system_path_filter
|
||||
def touch(path):
|
||||
"""Creates an empty file at the specified path."""
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
perms = os.O_WRONLY | os.O_CREAT
|
||||
else:
|
||||
perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
|
||||
@@ -1126,7 +1190,7 @@ def temp_cwd():
|
||||
yield tmp_dir
|
||||
finally:
|
||||
kwargs = {}
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
kwargs["ignore_errors"] = False
|
||||
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
|
||||
shutil.rmtree(tmp_dir, **kwargs)
|
||||
@@ -1220,7 +1284,6 @@ def traverse_tree(
|
||||
# target is relative to the link, then that may not resolve properly
|
||||
# relative to our cwd - see resolve_link_target_relative_to_the_link
|
||||
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
|
||||
|
||||
# When follow_nonexisting isn't set, don't descend into dirs
|
||||
# in source that do not exist in dest
|
||||
if follow_nonexisting or os.path.exists(dest_child):
|
||||
@@ -1372,7 +1435,7 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
|
||||
try:
|
||||
isdir = f.is_dir()
|
||||
except OSError as e:
|
||||
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
|
||||
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
|
||||
# if path is a symlink, determine destination and
|
||||
# evaluate file vs directory
|
||||
link_target = resolve_link_target_relative_to_the_link(f)
|
||||
@@ -1481,11 +1544,11 @@ def readonly_file_handler(ignore_errors=False):
|
||||
"""
|
||||
|
||||
def error_remove_readonly(func, path, exc):
|
||||
if not is_windows:
|
||||
if sys.platform != "win32":
|
||||
raise RuntimeError("This method should only be invoked on Windows")
|
||||
excvalue = exc[1]
|
||||
if (
|
||||
is_windows
|
||||
sys.platform == "win32"
|
||||
and func in (os.rmdir, os.remove, os.unlink)
|
||||
and excvalue.errno == errno.EACCES
|
||||
):
|
||||
@@ -1515,7 +1578,7 @@ def remove_linked_tree(path):
|
||||
|
||||
# Windows readonly files cannot be removed by Python
|
||||
# directly.
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
kwargs["ignore_errors"] = False
|
||||
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
|
||||
|
||||
@@ -1662,7 +1725,6 @@ def find(root, files, recursive=True):
|
||||
|
||||
@system_path_filter
|
||||
def _find_recursive(root, search_files):
|
||||
|
||||
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||
# we want to poke the filesystem as little as possible, but still maintain
|
||||
# stability in the order of the answer. Thus we are recording each library
|
||||
@@ -2030,7 +2092,7 @@ def names(self):
|
||||
# on non Windows platform
|
||||
# Windows valid library extensions are:
|
||||
# ['.dll', '.lib']
|
||||
valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
|
||||
valid_exts = [".dll", ".lib"] if sys.platform == "win32" else [".dylib", ".so", ".a"]
|
||||
for ext in valid_exts:
|
||||
i = name.rfind(ext)
|
||||
if i != -1:
|
||||
@@ -2178,7 +2240,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
||||
message = message.format(find_libraries.__name__, type(libraries))
|
||||
raise TypeError(message)
|
||||
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
static_ext = "lib"
|
||||
# For linking (runtime=False) you need the .lib files regardless of
|
||||
# whether you are doing a shared or static link
|
||||
@@ -2210,7 +2272,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
||||
# finally search all of root recursively. The search stops when the first
|
||||
# match is found.
|
||||
common_lib_dirs = ["lib", "lib64"]
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
common_lib_dirs.extend(["bin", "Lib"])
|
||||
|
||||
for subdir in common_lib_dirs:
|
||||
@@ -2345,7 +2407,7 @@ def _link(self, path, dest_dir):
|
||||
# For py2 compatibility, we have to catch the specific Windows error code
|
||||
# associate with trying to create a file that already exists (winerror 183)
|
||||
except OSError as e:
|
||||
if e.winerror == 183:
|
||||
if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
|
||||
# We have either already symlinked or we are encoutering a naming clash
|
||||
# either way, we don't want to overwrite existing libraries
|
||||
already_linked = islink(dest_file)
|
||||
@@ -2633,3 +2695,28 @@ def temporary_dir(
|
||||
yield tmp_dir
|
||||
finally:
|
||||
remove_directory_contents(tmp_dir)
|
||||
|
||||
|
||||
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
|
||||
"""Create a small summary of the given file. Does not error
|
||||
when file does not exist.
|
||||
|
||||
Args:
|
||||
print_bytes (int): Number of bytes to print from start/end of file
|
||||
|
||||
Returns:
|
||||
Tuple of size and byte string containing first n .. last n bytes.
|
||||
Size is 0 if file cannot be read."""
|
||||
try:
|
||||
n = print_bytes
|
||||
with open(path, "rb") as f:
|
||||
size = os.fstat(f.fileno()).st_size
|
||||
if size <= 2 * n:
|
||||
short_contents = f.read(2 * n)
|
||||
else:
|
||||
short_contents = f.read(n)
|
||||
f.seek(-n, 2)
|
||||
short_contents += b"..." + f.read(n)
|
||||
return size, short_contents
|
||||
except OSError:
|
||||
return 0, b""
|
||||
|
||||
@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
|
||||
except TypeError as e:
|
||||
# TypeError is raised when indexing into a dict if the key is unhashable.
|
||||
raise UnhashableArguments(
|
||||
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
|
||||
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
|
||||
) from e
|
||||
|
||||
return _memoized_function
|
||||
@@ -237,6 +237,7 @@ def decorator_with_or_without_args(decorator):
|
||||
@decorator
|
||||
|
||||
"""
|
||||
|
||||
# See https://stackoverflow.com/questions/653368 for more on this
|
||||
@functools.wraps(decorator)
|
||||
def new_dec(*args, **kwargs):
|
||||
@@ -990,8 +991,7 @@ def enum(**kwargs):
|
||||
|
||||
|
||||
def stable_partition(
|
||||
input_iterable: Iterable,
|
||||
predicate_fn: Callable[[Any], bool],
|
||||
input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
|
||||
) -> Tuple[List[Any], List[Any]]:
|
||||
"""Partition the input iterable according to a custom predicate.
|
||||
|
||||
@@ -1104,11 +1104,7 @@ def __enter__(self):
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_value is not None:
|
||||
self._handler._receive_forwarded(
|
||||
self._context,
|
||||
exc_value,
|
||||
traceback.format_tb(tb),
|
||||
)
|
||||
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
|
||||
|
||||
# Suppress any exception from being re-raised:
|
||||
# https://docs.python.org/3/reference/datamodel.html#object.__exit__.
|
||||
|
||||
@@ -75,7 +75,7 @@ def __init__(self, ignore=None):
|
||||
# so that we have a fast lookup and can run mkdir in order.
|
||||
self.directories = OrderedDict()
|
||||
|
||||
# Files to link. Maps dst_rel to (src_rel, src_root)
|
||||
# Files to link. Maps dst_rel to (src_root, src_rel)
|
||||
self.files = OrderedDict()
|
||||
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
@@ -430,6 +430,11 @@ class MergeConflictError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class ConflictingSpecsError(MergeConflictError):
|
||||
def __init__(self, spec_1, spec_2):
|
||||
super(MergeConflictError, self).__init__(spec_1, spec_2)
|
||||
|
||||
|
||||
class SingleMergeConflictError(MergeConflictError):
|
||||
def __init__(self, path):
|
||||
super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
|
||||
|
||||
@@ -18,7 +18,7 @@ class Barrier:
|
||||
|
||||
Python 2 doesn't have multiprocessing barriers so we implement this.
|
||||
|
||||
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
|
||||
See https://greenteapress.com/semaphores/LittleBookOfSemaphores.pdf, p. 41.
|
||||
"""
|
||||
|
||||
def __init__(self, n, timeout=None):
|
||||
|
||||
@@ -5,15 +5,13 @@
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from os.path import exists, join
|
||||
from sys import platform as _platform
|
||||
|
||||
from llnl.util import lang
|
||||
|
||||
is_windows = _platform == "win32"
|
||||
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
from win32file import CreateHardLink
|
||||
|
||||
|
||||
@@ -23,7 +21,7 @@ def symlink(real_path, link_path):
|
||||
|
||||
On Windows, use junctions if os.symlink fails.
|
||||
"""
|
||||
if not is_windows:
|
||||
if sys.platform != "win32":
|
||||
os.symlink(real_path, link_path)
|
||||
elif _win32_can_symlink():
|
||||
# Windows requires target_is_directory=True when the target is a dir.
|
||||
@@ -32,9 +30,15 @@ def symlink(real_path, link_path):
|
||||
try:
|
||||
# Try to use junctions
|
||||
_win32_junction(real_path, link_path)
|
||||
except OSError:
|
||||
# If all else fails, fall back to copying files
|
||||
shutil.copyfile(real_path, link_path)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST:
|
||||
# EEXIST error indicates that file we're trying to "link"
|
||||
# is already present, don't bother trying to copy which will also fail
|
||||
# just raise
|
||||
raise
|
||||
else:
|
||||
# If all else fails, fall back to copying files
|
||||
shutil.copyfile(real_path, link_path)
|
||||
|
||||
|
||||
def islink(path):
|
||||
@@ -99,7 +103,7 @@ def _win32_is_junction(path):
|
||||
if os.path.islink(path):
|
||||
return False
|
||||
|
||||
if is_windows:
|
||||
if sys.platform == "win32":
|
||||
import ctypes.wintypes
|
||||
|
||||
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
|
||||
|
||||
@@ -108,7 +108,6 @@ class SuppressOutput:
|
||||
"""Class for disabling output in a scope using 'with' keyword"""
|
||||
|
||||
def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
|
||||
|
||||
self._msg_enabled_initial = _msg_enabled
|
||||
self._warn_enabled_initial = _warn_enabled
|
||||
self._error_enabled_initial = _error_enabled
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from typing import IO, Any, List, Optional
|
||||
|
||||
from llnl.util.tty import terminal_size
|
||||
from llnl.util.tty.color import cextra, clen
|
||||
@@ -97,7 +98,16 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
|
||||
return config
|
||||
|
||||
|
||||
def colify(elts, **options):
|
||||
def colify(
|
||||
elts: List[Any],
|
||||
cols: int = 0,
|
||||
output: Optional[IO] = None,
|
||||
indent: int = 0,
|
||||
padding: int = 2,
|
||||
tty: Optional[bool] = None,
|
||||
method: str = "variable",
|
||||
console_cols: Optional[int] = None,
|
||||
):
|
||||
"""Takes a list of elements as input and finds a good columnization
|
||||
of them, similar to how gnu ls does. This supports both
|
||||
uniform-width and variable-width (tighter) columns.
|
||||
@@ -106,31 +116,21 @@ def colify(elts, **options):
|
||||
using ``str()``.
|
||||
|
||||
Keyword Arguments:
|
||||
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
||||
indent (int): Optionally indent all columns by some number of spaces
|
||||
padding (int): Spaces between columns. Default is 2
|
||||
width (int): Width of the output. Default is 80 if tty not detected
|
||||
cols (int): Force number of columns. Default is to size to terminal, or
|
||||
output: A file object to write to. Default is ``sys.stdout``
|
||||
indent: Optionally indent all columns by some number of spaces
|
||||
padding: Spaces between columns. Default is 2
|
||||
width: Width of the output. Default is 80 if tty not detected
|
||||
cols: Force number of columns. Default is to size to terminal, or
|
||||
single-column if no tty
|
||||
tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
|
||||
tty: Whether to attempt to write to a tty. Default is to autodetect a
|
||||
tty. Set to False to force single-column output
|
||||
method (str): Method to use to fit columns. Options are variable or uniform.
|
||||
method: Method to use to fit columns. Options are variable or uniform.
|
||||
Variable-width columns are tighter, uniform columns are all the same width
|
||||
and fit less data on the screen
|
||||
console_cols: number of columns on this console (default: autodetect)
|
||||
"""
|
||||
# Get keyword arguments or set defaults
|
||||
cols = options.pop("cols", 0)
|
||||
output = options.pop("output", sys.stdout)
|
||||
indent = options.pop("indent", 0)
|
||||
padding = options.pop("padding", 2)
|
||||
tty = options.pop("tty", None)
|
||||
method = options.pop("method", "variable")
|
||||
console_cols = options.pop("width", None)
|
||||
|
||||
if options:
|
||||
raise TypeError(
|
||||
"'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
|
||||
)
|
||||
if output is None:
|
||||
output = sys.stdout
|
||||
|
||||
# elts needs to be an array of strings so we can count the elements
|
||||
elts = [str(elt) for elt in elts]
|
||||
@@ -153,10 +153,11 @@ def colify(elts, **options):
|
||||
cols = 1
|
||||
|
||||
# Specify the number of character columns to use.
|
||||
if not console_cols:
|
||||
if console_cols is None:
|
||||
console_rows, console_cols = terminal_size()
|
||||
elif type(console_cols) != int:
|
||||
elif not isinstance(console_cols, int):
|
||||
raise ValueError("Number of columns must be an int")
|
||||
|
||||
console_cols = max(1, console_cols - indent)
|
||||
|
||||
# Choose a method. Variable-width colums vs uniform-width.
|
||||
@@ -192,7 +193,13 @@ def colify(elts, **options):
|
||||
return (config.cols, tuple(config.widths))
|
||||
|
||||
|
||||
def colify_table(table, **options):
|
||||
def colify_table(
|
||||
table: List[List[Any]],
|
||||
output: Optional[IO] = None,
|
||||
indent: int = 0,
|
||||
padding: int = 2,
|
||||
console_cols: Optional[int] = None,
|
||||
):
|
||||
"""Version of ``colify()`` for data expressed in rows, (list of lists).
|
||||
|
||||
Same as regular colify but:
|
||||
@@ -218,20 +225,38 @@ def transpose():
|
||||
for row in table:
|
||||
yield row[i]
|
||||
|
||||
if "cols" in options:
|
||||
raise ValueError("Cannot override columsn in colify_table.")
|
||||
options["cols"] = columns
|
||||
|
||||
# don't reduce to 1 column for non-tty
|
||||
options["tty"] = True
|
||||
|
||||
colify(transpose(), **options)
|
||||
colify(
|
||||
transpose(),
|
||||
cols=columns, # this is always the number of cols in the table
|
||||
tty=True, # don't reduce to 1 column for non-tty
|
||||
output=output,
|
||||
indent=indent,
|
||||
padding=padding,
|
||||
console_cols=console_cols,
|
||||
)
|
||||
|
||||
|
||||
def colified(elts, **options):
|
||||
def colified(
|
||||
elts: List[Any],
|
||||
cols: int = 0,
|
||||
output: Optional[IO] = None,
|
||||
indent: int = 0,
|
||||
padding: int = 2,
|
||||
tty: Optional[bool] = None,
|
||||
method: str = "variable",
|
||||
console_cols: Optional[int] = None,
|
||||
):
|
||||
"""Invokes the ``colify()`` function but returns the result as a string
|
||||
instead of writing it to an output stream."""
|
||||
sio = io.StringIO()
|
||||
options["output"] = sio
|
||||
colify(elts, **options)
|
||||
colify(
|
||||
elts,
|
||||
cols=cols,
|
||||
output=sio,
|
||||
indent=indent,
|
||||
padding=padding,
|
||||
tty=tty,
|
||||
method=method,
|
||||
console_cols=console_cols,
|
||||
)
|
||||
return sio.getvalue()
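As a companion to the earlier ``colify()`` sketch, a hedged example of ``colified()``, which renders the same layout but returns it as a string (again assuming the ``llnl.util.tty.colify`` module path):

```python
from llnl.util.tty.colify import colified  # module path assumed from Spack's tree

# Force two columns and capture the rendered text instead of printing it.
text = colified(["zlib", "openssl", "cmake", "python"], cols=2, indent=2)
print(text)
```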
|
||||
|
||||
@@ -161,10 +161,7 @@ def _is_background(self):
|
||||
def _get_canon_echo_flags(self):
|
||||
"""Get current termios canonical and echo settings."""
|
||||
cfg = termios.tcgetattr(self.stream)
|
||||
return (
|
||||
bool(cfg[3] & termios.ICANON),
|
||||
bool(cfg[3] & termios.ECHO),
|
||||
)
|
||||
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
|
||||
|
||||
def _enable_keyboard_input(self):
|
||||
"""Disable canonical input and echoing on ``self.stream``."""
|
||||
|
||||
@@ -77,10 +77,7 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
|
||||
def get_canon_echo_attrs(self):
|
||||
"""Get echo and canon attributes of the terminal of controller_fd."""
|
||||
cfg = termios.tcgetattr(self.controller_fd)
|
||||
return (
|
||||
bool(cfg[3] & termios.ICANON),
|
||||
bool(cfg[3] & termios.ECHO),
|
||||
)
|
||||
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
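Both termios helpers above read the local-mode flags out of ``termios.tcgetattr``. A standalone, POSIX-only sketch of the same flag check against standard input (it needs a real tty attached to succeed):

```python
import sys
import termios

# Inspect whether the terminal attached to stdin currently has canonical
# input and echo enabled, mirroring the bitmask checks in the diff above.
attrs = termios.tcgetattr(sys.stdin)
lflags = attrs[3]
print("canon:", bool(lflags & termios.ICANON), "echo:", bool(lflags & termios.ECHO))
```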
|
||||
|
||||
def horizontal_line(self, name):
|
||||
"""Labled horizontal line for debugging."""
|
||||
@@ -92,11 +89,7 @@ def status(self):
|
||||
if self.debug:
|
||||
canon, echo = self.get_canon_echo_attrs()
|
||||
sys.stderr.write(
|
||||
"canon: %s, echo: %s\n"
|
||||
% (
|
||||
"on" if canon else "off",
|
||||
"on" if echo else "off",
|
||||
)
|
||||
"canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
|
||||
)
|
||||
sys.stderr.write("input: %s\n" % self.input_on())
|
||||
sys.stderr.write("bg: %s\n" % self.background())
|
||||
|
||||
@@ -25,7 +25,7 @@ def architecture_compatible(self, target, constraint):
|
||||
return (
|
||||
not target.architecture
|
||||
or not constraint.architecture
|
||||
or target.architecture.satisfies(constraint.architecture)
|
||||
or target.architecture.intersects(constraint.architecture)
|
||||
)
|
||||
|
||||
@memoized
|
||||
@@ -104,7 +104,7 @@ def compiler_compatible(self, parent, child, **kwargs):
|
||||
for cversion in child.compiler.versions:
|
||||
# For a few compilers use specialized comparisons.
|
||||
# Otherwise match on version match.
|
||||
if pversion.satisfies(cversion):
|
||||
if pversion.intersects(cversion):
|
||||
return True
|
||||
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
|
||||
pversion, cversion
|
||||
|
||||
@@ -321,8 +321,7 @@ def _check_patch_urls(pkgs, error_cls):
|
||||
errors.append(
|
||||
error_cls(
|
||||
"patch URL in package {0} must end with {1}".format(
|
||||
pkg_cls.name,
|
||||
full_index_arg,
|
||||
pkg_cls.name, full_index_arg
|
||||
),
|
||||
[patch.url],
|
||||
)
|
||||
@@ -696,8 +695,11 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
|
||||
try:
|
||||
variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
|
||||
except spack.variant.InvalidVariantValueError:
|
||||
error_msg = "The variant '{}' default value in package '{}' cannot be validated"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
error_msg = (
|
||||
"The default value of the variant '{}' in package '{}' failed validation"
|
||||
)
|
||||
question = "Is it among the allowed values?"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))
|
||||
|
||||
return errors
|
||||
|
||||
@@ -722,7 +724,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
|
||||
dependency_pkg_cls = None
|
||||
try:
|
||||
dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
|
||||
assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
|
||||
assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
|
||||
except Exception:
|
||||
summary = (
|
||||
"{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
|
||||
|
||||
@@ -6,6 +6,8 @@
|
||||
import codecs
|
||||
import collections
|
||||
import hashlib
|
||||
import io
|
||||
import itertools
|
||||
import json
|
||||
import multiprocessing.pool
|
||||
import os
|
||||
@@ -20,7 +22,8 @@
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from contextlib import closing
|
||||
from contextlib import closing, contextmanager
|
||||
from gzip import GzipFile
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
@@ -39,7 +42,10 @@
|
||||
import spack.platforms
|
||||
import spack.relocate as relocate
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.traverse as traverse
|
||||
import spack.util.crypto
|
||||
import spack.util.file_cache as file_cache
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_json as sjson
|
||||
@@ -209,10 +215,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
break
|
||||
else:
|
||||
self._mirrors_for_spec[dag_hash].append(
|
||||
{
|
||||
"mirror_url": mirror_url,
|
||||
"spec": indexed_spec,
|
||||
}
|
||||
{"mirror_url": mirror_url, "spec": indexed_spec}
|
||||
)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
@@ -295,10 +298,7 @@ def update_spec(self, spec, found_list):
|
||||
break
|
||||
else:
|
||||
current_list.append(
|
||||
{
|
||||
"mirror_url": new_entry["mirror_url"],
|
||||
"spec": new_entry["spec"],
|
||||
}
|
||||
{"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
|
||||
)
|
||||
|
||||
def update(self, with_cooldown=False):
|
||||
@@ -366,8 +366,7 @@ def update(self, with_cooldown=False):
|
||||
# May need to fetch the index and update the local caches
|
||||
try:
|
||||
needs_regen = self._fetch_and_cache_index(
|
||||
cached_mirror_url,
|
||||
cache_entry=cache_entry,
|
||||
cached_mirror_url, cache_entry=cache_entry
|
||||
)
|
||||
self._last_fetch_times[cached_mirror_url] = (now, True)
|
||||
all_methods_failed = False
|
||||
@@ -559,7 +558,12 @@ class NoChecksumException(spack.error.SpackError):
|
||||
Raised if file fails checksum verification.
|
||||
"""
|
||||
|
||||
pass
|
||||
def __init__(self, path, size, contents, algorithm, expected, computed):
|
||||
super(NoChecksumException, self).__init__(
|
||||
f"{algorithm} checksum failed for {path}",
|
||||
f"Expected {expected} but got {computed}. "
|
||||
f"File size = {size} bytes. Contents = {contents!r}",
|
||||
)
|
||||
|
||||
|
||||
class NewLayoutException(spack.error.SpackError):
|
||||
@@ -739,34 +743,31 @@ def get_buildfile_manifest(spec):
|
||||
return data
|
||||
|
||||
|
||||
def write_buildinfo_file(spec, workdir, rel=False):
|
||||
"""
|
||||
Create a cache file containing information
|
||||
required for the relocation
|
||||
"""
|
||||
def prefixes_to_hashes(spec):
|
||||
return {
|
||||
str(s.prefix): s.dag_hash()
|
||||
for s in itertools.chain(
|
||||
spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
def get_buildinfo_dict(spec, rel=False):
|
||||
"""Create metadata for a tarball"""
|
||||
manifest = get_buildfile_manifest(spec)
|
||||
|
||||
prefix_to_hash = dict()
|
||||
prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
|
||||
deps = spack.build_environment.get_rpath_deps(spec.package)
|
||||
for d in deps + spec.dependencies(deptype="run"):
|
||||
prefix_to_hash[str(d.prefix)] = d.dag_hash()
|
||||
|
||||
# Create buildinfo data and write it to disk
|
||||
buildinfo = {}
|
||||
buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
|
||||
buildinfo["relative_rpaths"] = rel
|
||||
buildinfo["buildpath"] = spack.store.layout.root
|
||||
buildinfo["spackprefix"] = spack.paths.prefix
|
||||
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
|
||||
buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
|
||||
buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
|
||||
buildinfo["relocate_links"] = manifest["link_to_relocate"]
|
||||
buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
|
||||
buildinfo["prefix_to_hash"] = prefix_to_hash
|
||||
filename = buildinfo_file_name(workdir)
|
||||
with open(filename, "w") as outfile:
|
||||
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
|
||||
return {
|
||||
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
||||
"relative_rpaths": rel,
|
||||
"buildpath": spack.store.layout.root,
|
||||
"spackprefix": spack.paths.prefix,
|
||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
|
||||
"relocate_textfiles": manifest["text_to_relocate"],
|
||||
"relocate_binaries": manifest["binary_to_relocate"],
|
||||
"relocate_links": manifest["link_to_relocate"],
|
||||
"hardlinks_deduped": manifest["hardlinks_deduped"],
|
||||
"prefix_to_hash": prefixes_to_hashes(spec),
|
||||
}
|
||||
|
||||
|
||||
def tarball_directory_name(spec):
|
||||
@@ -1139,6 +1140,68 @@ def generate_key_index(key_prefix, tmpdir=None):
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def gzip_compressed_tarfile(path):
|
||||
"""Create a reproducible, compressed tarfile"""
|
||||
# Create gzip compressed tarball of the install prefix
|
||||
# 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
|
||||
# If the filename="" is dropped, Python will use fileobj.name instead.
|
||||
# This should effectively mimic `gzip --no-name`.
|
||||
# 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
|
||||
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
|
||||
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
|
||||
# So we follow gzip.
|
||||
with open(path, "wb") as fileobj, closing(
|
||||
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
|
||||
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
|
||||
yield tar
|
||||
|
||||
|
||||
def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
|
||||
# We only add files, symlinks, hardlinks, and directories
|
||||
# No character devices, block devices and FIFOs should ever enter a tarball.
|
||||
if tarinfo.isdev():
|
||||
return None
|
||||
|
||||
# For distribution, it makes no sense to keep user/group data, since (a) they don't exist
|
||||
# on other machines, and (b) they lead to surprises as `tar x` run as root will change
|
||||
# ownership if it can. We want to extract as the current user. By setting owner to root,
|
||||
# root will extract as root, and non-privileged user will extract as themselves.
|
||||
tarinfo.uid = 0
|
||||
tarinfo.gid = 0
|
||||
tarinfo.uname = ""
|
||||
tarinfo.gname = ""
|
||||
|
||||
# Reset mtime to epoch time; our prefixes are not truly immutable, so files may get
|
||||
# touched; as long as the content does not change, this ensures we get stable tarballs.
|
||||
tarinfo.mtime = 0
|
||||
|
||||
# Normalize mode
|
||||
if tarinfo.isfile() or tarinfo.islnk():
|
||||
# If user can execute, use 0o755; else 0o644
|
||||
# This is to avoid potentially unsafe world writable & executable files that may get
|
||||
# extracted when Python or tar is run with privileges
|
||||
tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
|
||||
else:  # symbolic links and directories
|
||||
tarinfo.mode = 0o755
|
||||
|
||||
return tarinfo
|
||||
|
||||
|
||||
def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
|
||||
# Serialize buildinfo for the tarball
|
||||
bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
|
||||
tarinfo = tarfile.TarInfo(name=path)
|
||||
tarinfo.size = len(bstring)
|
||||
tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
|
||||
|
||||
|
||||
def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
|
||||
with gzip_compressed_tarfile(tarfile_path) as tar:
|
||||
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
|
||||
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
|
||||
|
||||
|
||||
def _build_tarball(
|
||||
spec,
|
||||
out_url,
|
||||
@@ -1156,15 +1219,37 @@ def _build_tarball(
|
||||
if not spec.concrete:
|
||||
raise ValueError("spec must be concrete to build tarball")
|
||||
|
||||
# set up some paths
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
cache_prefix = build_cache_prefix(tmpdir)
|
||||
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
|
||||
_build_tarball_in_stage_dir(
|
||||
spec,
|
||||
out_url,
|
||||
stage_dir=tmpdir,
|
||||
force=force,
|
||||
relative=relative,
|
||||
unsigned=unsigned,
|
||||
allow_root=allow_root,
|
||||
key=key,
|
||||
regenerate_index=regenerate_index,
|
||||
)
|
||||
|
||||
|
||||
def _build_tarball_in_stage_dir(
|
||||
spec,
|
||||
out_url,
|
||||
stage_dir,
|
||||
force=False,
|
||||
relative=False,
|
||||
unsigned=False,
|
||||
allow_root=False,
|
||||
key=None,
|
||||
regenerate_index=False,
|
||||
):
|
||||
cache_prefix = build_cache_prefix(stage_dir)
|
||||
tarfile_name = tarball_name(spec, ".spack")
|
||||
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
|
||||
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
|
||||
spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
|
||||
remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir))
|
||||
remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, stage_dir))
|
||||
|
||||
mkdirp(tarfile_dir)
|
||||
if web_util.url_exists(remote_spackfile_path):
|
||||
@@ -1183,7 +1268,7 @@ def _build_tarball(
|
||||
signed_specfile_path = "{0}.sig".format(specfile_path)
|
||||
|
||||
remote_specfile_path = url_util.join(
|
||||
out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
|
||||
out_url, os.path.relpath(specfile_path, os.path.realpath(stage_dir))
|
||||
)
|
||||
remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
|
||||
|
||||
@@ -1198,50 +1283,41 @@ def _build_tarball(
|
||||
):
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
# make a copy of the install directory to work with
|
||||
workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
|
||||
# install_tree copies hardlinks
|
||||
# create a temporary tarfile from prefix and extract it to workdir
|
||||
# tarfile preserves hardlinks
|
||||
temp_tarfile_name = tarball_name(spec, ".tar")
|
||||
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
|
||||
tar.add(name="%s" % spec.prefix, arcname=".")
|
||||
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
|
||||
tar.extractall(workdir)
|
||||
os.remove(temp_tarfile_path)
|
||||
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
|
||||
workdir = os.path.join(stage_dir, pkg_dir)
|
||||
|
||||
# TODO: We generally don't want to mutate any files, but when using relative
|
||||
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
|
||||
# For now, we only make a full copy of the spec prefix when in relative mode.
|
||||
|
||||
if relative:
|
||||
# tarfile is used because it preserves hardlink etc best.
|
||||
binaries_dir = workdir
|
||||
temp_tarfile_name = tarball_name(spec, ".tar")
|
||||
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
|
||||
tar.add(name="%s" % spec.prefix, arcname=".")
|
||||
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
|
||||
tar.extractall(workdir)
|
||||
os.remove(temp_tarfile_path)
|
||||
else:
|
||||
binaries_dir = spec.prefix
|
||||
|
||||
# create info for later relocation and create tar
|
||||
write_buildinfo_file(spec, workdir, relative)
|
||||
buildinfo = get_buildinfo_dict(spec, relative)
|
||||
|
||||
# optionally make the paths in the binaries relative to each other
|
||||
# in the spack install tree before creating tarball
|
||||
if relative:
|
||||
try:
|
||||
make_package_relative(workdir, spec, allow_root)
|
||||
except Exception as e:
|
||||
shutil.rmtree(workdir)
|
||||
shutil.rmtree(tarfile_dir)
|
||||
shutil.rmtree(tmpdir)
|
||||
tty.die(e)
|
||||
else:
|
||||
try:
|
||||
check_package_relocatable(workdir, spec, allow_root)
|
||||
except Exception as e:
|
||||
shutil.rmtree(workdir)
|
||||
shutil.rmtree(tarfile_dir)
|
||||
shutil.rmtree(tmpdir)
|
||||
tty.die(e)
|
||||
make_package_relative(workdir, spec, buildinfo, allow_root)
|
||||
elif not allow_root:
|
||||
ensure_package_relocatable(buildinfo, binaries_dir)
|
||||
|
||||
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
|
||||
|
||||
# create gzip compressed tarball of the install prefix
|
||||
# On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
|
||||
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
|
||||
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
|
||||
# So we follow gzip.
|
||||
with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
|
||||
tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
|
||||
# remove copy of install directory
|
||||
shutil.rmtree(workdir)
|
||||
if relative:
|
||||
shutil.rmtree(workdir)
|
||||
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
@@ -1267,7 +1343,11 @@ def _build_tarball(
|
||||
spec_dict["buildinfo"] = buildinfo
|
||||
|
||||
with open(specfile_path, "w") as outfile:
|
||||
outfile.write(sjson.dump(spec_dict))
|
||||
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
|
||||
# If lines are longer, they are truncated without error. Thanks GPG!
|
||||
# So, here we still add newlines, but no indent, to save on file size and
|
||||
# line length.
|
||||
json.dump(spec_dict, outfile, indent=0, separators=(",", ":"))
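A self-contained sketch of the dump call above: ``indent=0`` still emits a newline per element, keeping every line far below GPG's clearsign limit, while the compact separators avoid padding (the payload here is hypothetical):

```python
import json

spec_dict = {"spec": {"name": "zlib", "version": "1.2.13", "arch": "linux-x86_64"}}

with open("zlib.spec.json", "w") as outfile:
    # One short line per item, no indentation, no spaces after separators.
    json.dump(spec_dict, outfile, indent=0, separators=(",", ":"))
```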
|
||||
|
||||
# sign the tarball and spec file with gpg
|
||||
if not unsigned:
|
||||
@@ -1284,73 +1364,61 @@ def _build_tarball(
|
||||
|
||||
tty.debug('Buildcache for "{0}" written to \n {1}'.format(spec, remote_spackfile_path))
|
||||
|
||||
try:
|
||||
# push the key to the build cache's _pgp directory so it can be
|
||||
# imported
|
||||
if not unsigned:
|
||||
push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
|
||||
# push the key to the build cache's _pgp directory so it can be
|
||||
# imported
|
||||
if not unsigned:
|
||||
push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=stage_dir)
|
||||
|
||||
# create an index.json for the build_cache directory so specs can be
|
||||
# found
|
||||
if regenerate_index:
|
||||
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir)))
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
# create an index.json for the build_cache directory so specs can be
|
||||
# found
|
||||
if regenerate_index:
|
||||
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
|
||||
def nodes_to_be_packaged(specs, root=True, dependencies=True):
|
||||
"""Return the list of nodes to be packaged, given a list of specs.
|
||||
|
||||
Args:
|
||||
specs (List[spack.spec.Spec]): list of root specs to be processed
|
||||
include_root (bool): include the root of each spec in the nodes
|
||||
include_dependencies (bool): include the dependencies of each
|
||||
root (bool): include the root of each spec in the nodes
|
||||
dependencies (bool): include the dependencies of each
|
||||
spec in the nodes
|
||||
"""
|
||||
if not include_root and not include_dependencies:
|
||||
return set()
|
||||
if not root and not dependencies:
|
||||
return []
|
||||
elif dependencies:
|
||||
nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
|
||||
else:
|
||||
nodes = set(specs)
|
||||
|
||||
def skip_node(current_node):
|
||||
if current_node.external or current_node.virtual:
|
||||
return True
|
||||
return spack.store.db.query_one(current_node) is None
|
||||
# Limit to installed non-externals.
|
||||
packageable = lambda n: not n.external and n.installed
|
||||
|
||||
expanded_set = set()
|
||||
for current_spec in specs:
|
||||
if not include_dependencies:
|
||||
nodes = [current_spec]
|
||||
else:
|
||||
nodes = [
|
||||
n
|
||||
for n in current_spec.traverse(
|
||||
order="post", root=include_root, deptype=("link", "run")
|
||||
)
|
||||
]
|
||||
|
||||
for node in nodes:
|
||||
if not skip_node(node):
|
||||
expanded_set.add(node)
|
||||
|
||||
return expanded_set
|
||||
# Mass install check
|
||||
with spack.store.db.read_transaction():
|
||||
return list(filter(packageable, nodes))
|
||||
|
||||
|
||||
def push(specs, push_url, specs_kwargs=None, **kwargs):
|
||||
def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
|
||||
"""Create a binary package for each of the specs passed as input and push them
|
||||
to a given push URL.
|
||||
|
||||
Args:
|
||||
specs (List[spack.spec.Spec]): installed specs to be packaged
|
||||
push_url (str): url where to push the binary package
|
||||
specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
|
||||
and "include_dependencies", which determine which part of each spec is
|
||||
packaged and pushed to the mirror
|
||||
include_root (bool): include the root of each spec in the nodes
|
||||
include_dependencies (bool): include the dependencies of each
|
||||
spec in the nodes
|
||||
**kwargs: TODO
|
||||
|
||||
"""
|
||||
specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
|
||||
nodes = nodes_to_be_packaged(specs, **specs_kwargs)
|
||||
# Be explicit about the argument type
|
||||
if type(include_root) != bool or type(include_dependencies) != bool:
|
||||
raise ValueError("Expected include_root/include_dependencies to be True/False")
|
||||
|
||||
nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
|
||||
|
||||
# TODO: This seems to be an easy target for task
|
||||
# TODO: distribution using a parallel pool
|
||||
@@ -1537,13 +1605,12 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
return None
|
||||
|
||||
|
||||
def make_package_relative(workdir, spec, allow_root):
|
||||
def make_package_relative(workdir, spec, buildinfo, allow_root):
|
||||
"""
|
||||
Change paths in binaries to relative paths. Change absolute symlinks
|
||||
to relative symlinks.
|
||||
"""
|
||||
prefix = spec.prefix
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
old_layout_root = buildinfo["buildpath"]
|
||||
orig_path_names = list()
|
||||
cur_path_names = list()
|
||||
@@ -1567,16 +1634,10 @@ def make_package_relative(workdir, spec, allow_root):
|
||||
relocate.make_link_relative(cur_path_names, orig_path_names)
|
||||
|
||||
|
||||
def check_package_relocatable(workdir, spec, allow_root):
|
||||
"""
|
||||
Check if package binaries are relocatable.
|
||||
Change links to placeholder links.
|
||||
"""
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
cur_path_names = list()
|
||||
for filename in buildinfo["relocate_binaries"]:
|
||||
cur_path_names.append(os.path.join(workdir, filename))
|
||||
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
|
||||
def ensure_package_relocatable(buildinfo, binaries_dir):
|
||||
"""Check if package binaries are relocatable."""
|
||||
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
|
||||
relocate.ensure_binaries_are_relocatable(binaries)
|
||||
|
||||
|
||||
def dedupe_hardlinks_if_necessary(root, buildinfo):
|
||||
@@ -1779,14 +1840,15 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
|
||||
raise UnsignedPackageException(
|
||||
"To install unsigned packages, use the --no-check-signature option."
|
||||
)
|
||||
# get the sha256 checksum of the tarball
|
||||
|
||||
# compute the sha256 checksum of the tarball
|
||||
local_checksum = checksum_tarball(tarfile_path)
|
||||
expected = remote_checksum["hash"]
|
||||
|
||||
# if the checksums don't match don't install
|
||||
if local_checksum != remote_checksum["hash"]:
|
||||
raise NoChecksumException(
|
||||
"Package tarball failed checksum verification.\n" "It cannot be installed."
|
||||
)
|
||||
if local_checksum != expected:
|
||||
size, contents = fsys.filesummary(tarfile_path)
|
||||
raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum)
|
||||
|
||||
return tarfile_path
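The comparison above relies on ``checksum_tarball`` to hash the downloaded file. A minimal stand-in for that kind of helper (the name is hypothetical) looks like:

```python
import hashlib


def sha256_of(path: str, blocksize: int = 1 << 20) -> str:
    # Stream the file in 1 MiB chunks so large tarballs never load fully into memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(blocksize), b""):
            digest.update(chunk)
    return digest.hexdigest()
```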
|
||||
|
||||
@@ -1844,12 +1906,14 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
|
||||
|
||||
# compute the sha256 checksum of the tarball
|
||||
local_checksum = checksum_tarball(tarfile_path)
|
||||
expected = bchecksum["hash"]
|
||||
|
||||
# if the checksums don't match don't install
|
||||
if local_checksum != bchecksum["hash"]:
|
||||
if local_checksum != expected:
|
||||
size, contents = fsys.filesummary(tarfile_path)
|
||||
_delete_staged_downloads(download_result)
|
||||
raise NoChecksumException(
|
||||
"Package tarball failed checksum verification.\n" "It cannot be installed."
|
||||
tarfile_path, size, contents, "sha256", expected, local_checksum
|
||||
)
|
||||
|
||||
new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
|
||||
@@ -1940,8 +2004,11 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
|
||||
tarball_path = download_result["tarball_stage"].save_filename
|
||||
msg = msg.format(tarball_path, sha256)
|
||||
if not checker.check(tarball_path):
|
||||
size, contents = fsys.filesummary(tarball_path)
|
||||
_delete_staged_downloads(download_result)
|
||||
raise spack.binary_distribution.NoChecksumException(msg)
|
||||
raise NoChecksumException(
|
||||
tarball_path, size, contents, checker.hash_name, sha256, checker.sum
|
||||
)
|
||||
tty.debug("Verified SHA256 checksum of the build cache")
|
||||
|
||||
# don't print long padded paths while extracting/relocating binaries
|
||||
@@ -2015,12 +2082,7 @@ def try_direct_fetch(spec, mirrors=None):
|
||||
fetched_spec = Spec.from_json(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
found_specs.append(
|
||||
{
|
||||
"mirror_url": mirror.fetch_url,
|
||||
"spec": fetched_spec,
|
||||
}
|
||||
)
|
||||
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
|
||||
|
||||
return found_specs
|
||||
|
||||
@@ -2322,11 +2384,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
|
||||
local_tarball_path = os.path.join(destination, tarball_dir_name)
|
||||
|
||||
files_to_fetch = [
|
||||
{
|
||||
"url": [tarball_path_name],
|
||||
"path": local_tarball_path,
|
||||
"required": True,
|
||||
},
|
||||
{"url": [tarball_path_name], "path": local_tarball_path, "required": True},
|
||||
{
|
||||
"url": [
|
||||
tarball_name(concrete_spec, ".spec.json.sig"),
|
||||
@@ -2447,12 +2505,7 @@ def conditional_fetch(self):
|
||||
response.headers.get("Etag", None) or response.headers.get("etag", None)
|
||||
)
|
||||
|
||||
return FetchIndexResult(
|
||||
etag=etag,
|
||||
hash=computed_hash,
|
||||
data=result,
|
||||
fresh=False,
|
||||
)
|
||||
return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)
|
||||
|
||||
|
||||
class EtagIndexFetcher:
|
||||
|
||||
@@ -5,11 +5,7 @@
|
||||
"""Function and classes needed to bootstrap Spack itself."""
|
||||
|
||||
from .config import ensure_bootstrap_configuration, is_bootstrapping
|
||||
from .core import (
|
||||
all_core_root_specs,
|
||||
ensure_core_dependencies,
|
||||
ensure_patchelf_in_path_or_raise,
|
||||
)
|
||||
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
|
||||
from .environment import BootstrapEnvironment, ensure_environment_dependencies
|
||||
from .status import status_message
|
||||
|
||||
|
||||
@@ -59,10 +59,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
|
||||
# to be picked up and used, possibly depending on something in the store, first
|
||||
# allows the bootstrap version to work when an incompatible version is in
|
||||
# sys.path
|
||||
orders = [
|
||||
module_paths + sys.path,
|
||||
sys.path + module_paths,
|
||||
]
|
||||
orders = [module_paths + sys.path, sys.path + module_paths]
|
||||
for path in orders:
|
||||
sys.path = path
|
||||
try:
|
||||
|
||||
@@ -53,12 +53,7 @@
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
|
||||
from ._common import (
|
||||
_executables_in_store,
|
||||
_python_import,
|
||||
_root_spec,
|
||||
_try_import_from_store,
|
||||
)
|
||||
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
|
||||
from .config import spack_python_interpreter, spec_for_current_python
|
||||
|
||||
#: Name of the file containing metadata about the bootstrapping source
|
||||
@@ -213,7 +208,7 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, tes
|
||||
# This will be None for things that don't depend on python
|
||||
python_spec = item.get("python", None)
|
||||
# Skip specs which are not compatible
|
||||
if not abstract_spec.satisfies(candidate_spec):
|
||||
if not abstract_spec.intersects(candidate_spec):
|
||||
continue
|
||||
|
||||
if python_spec is not None and python_spec not in abstract_spec:
|
||||
|
||||
@@ -171,7 +171,7 @@ def mypy_root_spec():
|
||||
|
||||
def black_root_spec():
|
||||
"""Return the root spec used to bootstrap black"""
|
||||
return _root_spec("py-black@:22.12.0")
|
||||
return _root_spec("py-black@:23.1.0")
|
||||
|
||||
|
||||
def flake8_root_spec():
|
||||
|
||||
@@ -69,13 +69,13 @@
|
||||
from spack.installer import InstallError
|
||||
from spack.util.cpus import cpus_available
|
||||
from spack.util.environment import (
|
||||
SYSTEM_DIRS,
|
||||
EnvironmentModifications,
|
||||
env_flag,
|
||||
filter_system_paths,
|
||||
get_path,
|
||||
inspect_path,
|
||||
is_system_path,
|
||||
system_dirs,
|
||||
validate,
|
||||
)
|
||||
from spack.util.executable import Executable
|
||||
@@ -397,7 +397,7 @@ def set_compiler_environment_variables(pkg, env):
|
||||
|
||||
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
|
||||
|
||||
env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
|
||||
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
|
||||
|
||||
compiler.setup_custom_environment(pkg, env)
|
||||
|
||||
@@ -485,7 +485,13 @@ def update_compiler_args_for_dep(dep):
|
||||
query = pkg.spec[dep.name]
|
||||
dep_link_dirs = list()
|
||||
try:
|
||||
# In some circumstances (particularly for externals) finding
|
||||
# libraries packages can be time consuming, so indicate that
|
||||
# we are performing this operation (and also report when it
|
||||
# finishes).
|
||||
tty.debug("Collecting libraries for {0}".format(dep.name))
|
||||
dep_link_dirs.extend(query.libs.directories)
|
||||
tty.debug("Libraries for {0} have been collected.".format(dep.name))
|
||||
except NoLibrariesError:
|
||||
tty.debug("No libraries found for {0}".format(dep.name))
|
||||
|
||||
@@ -772,7 +778,9 @@ def setup_package(pkg, dirty, context="build"):
|
||||
set_compiler_environment_variables(pkg, env_mods)
|
||||
set_wrapper_variables(pkg, env_mods)
|
||||
|
||||
tty.debug("setup_package: grabbing modifications from dependencies")
|
||||
env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
|
||||
tty.debug("setup_package: collected all modifications from dependencies")
|
||||
|
||||
# architecture specific setup
|
||||
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
|
||||
@@ -780,6 +788,7 @@ def setup_package(pkg, dirty, context="build"):
|
||||
platform.setup_platform_environment(pkg, env_mods)
|
||||
|
||||
if context == "build":
|
||||
tty.debug("setup_package: setup build environment for root")
|
||||
builder = spack.builder.create(pkg)
|
||||
builder.setup_build_environment(env_mods)
|
||||
|
||||
@@ -790,6 +799,7 @@ def setup_package(pkg, dirty, context="build"):
|
||||
" includes and omit it when invoked with '--cflags'."
|
||||
)
|
||||
elif context == "test":
|
||||
tty.debug("setup_package: setup test environment for root")
|
||||
env_mods.extend(
|
||||
inspect_path(
|
||||
pkg.spec.prefix,
|
||||
@@ -806,6 +816,7 @@ def setup_package(pkg, dirty, context="build"):
|
||||
# Load modules on an already clean environment, just before applying Spack's
|
||||
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
|
||||
if need_compiler:
|
||||
tty.debug("setup_package: loading compiler modules")
|
||||
for mod in pkg.compiler.modules:
|
||||
load_module(mod)
|
||||
|
||||
@@ -943,6 +954,7 @@ def default_modifications_for_dep(dep):
|
||||
_make_runnable(dep, env)
|
||||
|
||||
def add_modifications_for_dep(dep):
|
||||
tty.debug("Adding env modifications for {0}".format(dep.name))
|
||||
# Some callers of this function only want the custom modifications.
|
||||
# For callers that want both custom and default modifications, we want
|
||||
# to perform the default modifications here (this groups custom
|
||||
@@ -968,6 +980,7 @@ def add_modifications_for_dep(dep):
|
||||
builder.setup_dependent_build_environment(env, spec)
|
||||
else:
|
||||
dpkg.setup_dependent_run_environment(env, spec)
|
||||
tty.debug("Added env modifications for {0}".format(dep.name))
|
||||
|
||||
# Note that we want to perform environment modifications in a fixed order.
|
||||
# The Spec.traverse method provides this: i.e. in addition to
|
||||
@@ -1016,7 +1029,6 @@ def get_cmake_prefix_path(pkg):
|
||||
def _setup_pkg_and_run(
|
||||
serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||
):
|
||||
|
||||
context = kwargs.get("context", "build")
|
||||
|
||||
try:
|
||||
|
||||
@@ -110,11 +110,7 @@ class AutotoolsBuilder(BaseBuilder):
|
||||
phases = ("autoreconf", "configure", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = (
|
||||
"configure_args",
|
||||
"check",
|
||||
"installcheck",
|
||||
)
|
||||
legacy_methods = ("configure_args", "check", "installcheck")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = (
|
||||
|
||||
@@ -31,7 +31,6 @@ def cmake_cache_option(name, boolean_value, comment=""):
|
||||
|
||||
|
||||
class CachedCMakeBuilder(CMakeBuilder):
|
||||
|
||||
#: Phases of a Cached CMake package
|
||||
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
|
||||
phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.util.path
|
||||
from spack.directives import build_system, depends_on, variant
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
@@ -35,6 +35,43 @@ def _extract_primary_generator(generator):
|
||||
return primary_generator
|
||||
|
||||
|
||||
def generator(*names: str, default: Optional[str] = None):
|
||||
"""The build system generator to use.
|
||||
|
||||
See ``cmake --help`` for a list of valid generators.
|
||||
Currently, "Unix Makefiles" and "Ninja" are the only generators
|
||||
that Spack supports. Defaults to "Unix Makefiles".
|
||||
|
||||
See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
for more information.
|
||||
|
||||
Args:
|
||||
names: allowed generators for this package
|
||||
default: default generator
|
||||
"""
|
||||
allowed_values = ("make", "ninja")
|
||||
if any(x not in allowed_values for x in names):
|
||||
msg = "only 'make' and 'ninja' are allowed for CMake's 'generator' directive"
|
||||
raise ValueError(msg)
|
||||
|
||||
default = default or names[0]
|
||||
not_used = [x for x in allowed_values if x not in names]
|
||||
|
||||
def _values(x):
|
||||
return x in allowed_values
|
||||
|
||||
_values.__doc__ = f"{','.join(names)}"
|
||||
|
||||
variant(
|
||||
"generator",
|
||||
default=default,
|
||||
values=_values,
|
||||
description="the build system generator to use",
|
||||
)
|
||||
for x in not_used:
|
||||
conflicts(f"generator={x}")
|
||||
|
||||
|
||||
class CMakePackage(spack.package_base.PackageBase):
|
||||
"""Specialized class for packages built using CMake
|
||||
|
||||
@@ -67,8 +104,15 @@ class CMakePackage(spack.package_base.PackageBase):
|
||||
when="^cmake@3.9:",
|
||||
description="CMake interprocedural optimization",
|
||||
)
|
||||
|
||||
if sys.platform == "win32":
|
||||
generator("ninja")
|
||||
else:
|
||||
generator("ninja", "make", default="make")
|
||||
|
||||
depends_on("cmake", type="build")
|
||||
depends_on("ninja", type="build", when="platform=windows")
|
||||
depends_on("gmake", type="build", when="generator=make")
|
||||
depends_on("ninja", type="build", when="generator=ninja")
|
||||
|
||||
def flags_to_build_system_args(self, flags):
|
||||
"""Return a list of all command line arguments to pass the specified
|
||||
@@ -138,18 +182,6 @@ class CMakeBuilder(BaseBuilder):
|
||||
| :py:meth:`~.CMakeBuilder.build_directory` | Directory where to |
|
||||
| | build the package |
|
||||
+-----------------------------------------------+--------------------+
|
||||
|
||||
The generator used by CMake can be specified by providing the ``generator``
|
||||
attribute. Per
|
||||
https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
|
||||
the format is: [<secondary-generator> - ]<primary_generator>.
|
||||
|
||||
The full list of primary and secondary generators supported by CMake may be found
|
||||
in the documentation for the version of CMake used; however, at this time Spack
|
||||
supports only the primary generators "Unix Makefiles" and "Ninja." Spack's CMake
|
||||
support is agnostic with respect to primary generators. Spack will generate a
|
||||
runtime error if the generator string does not follow the prescribed format, or if
|
||||
the primary generator is not supported.
|
||||
"""
|
||||
|
||||
#: Phases of a CMake package
|
||||
@@ -160,7 +192,6 @@ class CMakeBuilder(BaseBuilder):
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes: Tuple[str, ...] = (
|
||||
"generator",
|
||||
"build_targets",
|
||||
"install_targets",
|
||||
"build_time_test_callbacks",
|
||||
@@ -171,16 +202,6 @@ class CMakeBuilder(BaseBuilder):
|
||||
"build_directory",
|
||||
)
|
||||
|
||||
#: The build system generator to use.
|
||||
#:
|
||||
#: See ``cmake --help`` for a list of valid generators.
|
||||
#: Currently, "Unix Makefiles" and "Ninja" are the only generators
|
||||
#: that Spack supports. Defaults to "Unix Makefiles".
|
||||
#:
|
||||
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
#: for more information.
|
||||
generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
||||
|
||||
#: Targets to be used during the build phase
|
||||
build_targets: List[str] = []
|
||||
#: Targets to be used during the install phase
|
||||
@@ -202,12 +223,20 @@ def root_cmakelists_dir(self):
|
||||
"""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def generator(self):
|
||||
if self.spec.satisfies("generator=make"):
|
||||
return "Unix Makefiles"
|
||||
if self.spec.satisfies("generator=ninja"):
|
||||
return "Ninja"
|
||||
msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
|
||||
raise ValueError(msg)
|
||||
|
||||
@property
|
||||
def std_cmake_args(self):
|
||||
"""Standard cmake arguments provided as a property for
|
||||
convenience of package writers
|
||||
"""
|
||||
# standard CMake arguments
|
||||
std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
|
||||
std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
|
||||
return std_cmake_args
|
||||
@@ -252,10 +281,7 @@ def std_args(pkg, generator=None):
|
||||
|
||||
if platform.mac_ver()[0]:
|
||||
args.extend(
|
||||
[
|
||||
define("CMAKE_FIND_FRAMEWORK", "LAST"),
|
||||
define("CMAKE_FIND_APPBUNDLE", "LAST"),
|
||||
]
|
||||
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
|
||||
)
|
||||
|
||||
# Set up CMake rpath
|
||||
|
||||
@@ -38,10 +38,7 @@ class GenericBuilder(BaseBuilder):
|
||||
legacy_methods: Tuple[str, ...] = ()
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes: Tuple[str, ...] = (
|
||||
"archive_files",
|
||||
"install_time_test_callbacks",
|
||||
)
|
||||
legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
|
||||
|
||||
#: Callback names for post-install phase tests
|
||||
install_time_test_callbacks = []
|
||||
|
||||
@@ -857,10 +857,7 @@ def scalapack_libs(self):
|
||||
raise_lib_error("Cannot find a BLACS library for the given MPI.")
|
||||
|
||||
int_suff = "_" + self.intel64_int_suffix
|
||||
scalapack_libnames = [
|
||||
"libmkl_scalapack" + int_suff,
|
||||
blacs_lib + int_suff,
|
||||
]
|
||||
scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
|
||||
sca_libs = find_libraries(
|
||||
scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
|
||||
)
|
||||
@@ -1161,9 +1158,7 @@ def _determine_license_type(self):
|
||||
#
|
||||
# Ideally, we just tell the installer to look around on the system.
|
||||
# Thankfully, we neither need to care nor emulate where it looks:
|
||||
license_type = {
|
||||
"ACTIVATION_TYPE": "exist_lic",
|
||||
}
|
||||
license_type = {"ACTIVATION_TYPE": "exist_lic"}
|
||||
|
||||
# However (and only), if the spack-internal Intel license file has been
|
||||
# populated beyond its templated explanatory comments, proffer it to
|
||||
|
||||
@@ -68,10 +68,7 @@ def unpack(self, pkg, spec, prefix):
|
||||
|
||||
@staticmethod
|
||||
def _generate_tree_line(name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(
|
||||
name=name,
|
||||
prefix=prefix,
|
||||
)
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
|
||||
|
||||
def generate_luarocks_config(self, pkg, spec, prefix):
|
||||
spec = self.pkg.spec
|
||||
|
||||
@@ -37,11 +37,7 @@ class IntelOneApiPackage(Package):
|
||||
conflicts(c, msg="This package in only available for x86_64 and Linux")
|
||||
|
||||
# Add variant to toggle environment modifications from vars.sh
|
||||
variant(
|
||||
"envmods",
|
||||
default=True,
|
||||
description="Toggles environment modifications",
|
||||
)
|
||||
variant("envmods", default=True, description="Toggles environment modifications")
|
||||
|
||||
@staticmethod
|
||||
def update_description(cls):
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.directives import build_system, depends_on, extends, maintainers
|
||||
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
||||
from spack.version import Version
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
|
||||
class PythonExtension(spack.package_base.PackageBase):
|
||||
maintainers = ["adamjstewart"]
|
||||
maintainers("adamjstewart", "pradyunsg")
|
||||
|
||||
@property
|
||||
def import_modules(self):
|
||||
@@ -113,6 +113,9 @@ def view_file_conflicts(self, view, merge_map):
|
||||
return conflicts
|
||||
|
||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
if not self.extendee_spec:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
bin_dir = self.spec.prefix.bin
|
||||
python_prefix = self.extendee_spec.prefix
|
||||
python_is_external = self.extendee_spec.external
|
||||
@@ -184,8 +187,6 @@ class PythonPackage(PythonExtension):
|
||||
#: Package name, version, and extension on PyPI
|
||||
pypi: Optional[str] = None
|
||||
|
||||
maintainers = ["adamjstewart", "pradyunsg"]
|
||||
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
build_system_class = "PythonPackage"
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import llnl.util.lang as lang
|
||||
|
||||
from spack.directives import extends
|
||||
from spack.directives import extends, maintainers
|
||||
|
||||
from .generic import GenericBuilder, Package
|
||||
|
||||
@@ -71,7 +71,7 @@ class RPackage(Package):
|
||||
|
||||
GenericBuilder = RBuilder
|
||||
|
||||
maintainers = ["glennpj"]
|
||||
maintainers("glennpj")
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
|
||||
import spack.builder
|
||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
|
||||
from spack.directives import build_system, extends
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
from spack.package_base import PackageBase
|
||||
from spack.util.environment import env_flag
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
@@ -23,7 +23,7 @@ class RacketPackage(PackageBase):
|
||||
"""
|
||||
|
||||
#: Package name, version, and extension on PyPI
|
||||
maintainers = ["elfprince13"]
|
||||
maintainers("elfprince13")
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
build_system_class = "RacketPackage"
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, extends
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
class RubyPackage(spack.package_base.PackageBase):
|
||||
"""Specialized class for building Ruby gems."""
|
||||
|
||||
maintainers = ["Kerilk"]
|
||||
maintainers("Kerilk")
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
|
||||
@@ -61,10 +61,7 @@ def import_modules(self):
|
||||
list: list of strings of module names
|
||||
"""
|
||||
modules = []
|
||||
root = os.path.join(
|
||||
self.prefix,
|
||||
self.spec["python"].package.platlib,
|
||||
)
|
||||
root = os.path.join(self.prefix, self.spec["python"].package.platlib)
|
||||
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
|
||||
@@ -38,13 +38,12 @@
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack import traverse
|
||||
from spack.error import SpackError
|
||||
from spack.reporters import CDash, CDashConfiguration
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
|
||||
JOB_RETRY_CONDITIONS = [
|
||||
"always",
|
||||
]
|
||||
JOB_RETRY_CONDITIONS = ["always"]
|
||||
|
||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
@@ -129,10 +128,7 @@ def _remove_reserved_tags(tags):
|
||||
|
||||
|
||||
def _get_spec_string(spec):
|
||||
format_elements = [
|
||||
"{name}{@version}",
|
||||
"{%compiler}",
|
||||
]
|
||||
format_elements = ["{name}{@version}", "{%compiler}"]
|
||||
|
||||
if spec.architecture:
|
||||
format_elements.append(" {arch=architecture}")
|
||||
@@ -328,12 +324,7 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
|
||||
dependencies = []
|
||||
|
||||
def append_dep(s, d):
|
||||
dependencies.append(
|
||||
{
|
||||
"spec": s,
|
||||
"depends": d,
|
||||
}
|
||||
)
|
||||
dependencies.append({"spec": s, "depends": d})
|
||||
|
||||
for spec in spec_list:
|
||||
for s in spec.traverse(deptype=all):
|
||||
@@ -346,10 +337,7 @@ def append_dep(s, d):
|
||||
)
|
||||
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {
|
||||
"spec": s,
|
||||
"needs_rebuild": not up_to_date_mirrors,
|
||||
}
|
||||
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = _spec_deps_key(d)
|
||||
@@ -368,76 +356,13 @@ def append_dep(s, d):
|
||||
}
|
||||
)
|
||||
|
||||
deps_json_obj = {
|
||||
"specs": specs,
|
||||
"dependencies": dependencies,
|
||||
}
|
||||
deps_json_obj = {"specs": specs, "dependencies": dependencies}
|
||||
|
||||
return deps_json_obj
|
||||
|
||||
|
||||
def _spec_matches(spec, match_string):
|
||||
return spec.satisfies(match_string)
|
||||
|
||||
|
||||
def _remove_attributes(src_dict, dest_dict):
|
||||
if "tags" in src_dict and "tags" in dest_dict:
|
||||
# For 'tags', we remove any tags that are listed for removal
|
||||
for tag in src_dict["tags"]:
|
||||
while tag in dest_dict["tags"]:
|
||||
dest_dict["tags"].remove(tag)
|
||||
|
||||
|
||||
def _copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
for runner_attr in attrs_list:
|
||||
if runner_attr in src_dict:
|
||||
if runner_attr in dest_dict and runner_attr == "tags":
|
||||
# For 'tags', we combine the lists of tags, while
|
||||
# avoiding duplicates
|
||||
for tag in src_dict[runner_attr]:
|
||||
if tag not in dest_dict[runner_attr]:
|
||||
dest_dict[runner_attr].append(tag)
|
||||
elif runner_attr in dest_dict and runner_attr == "variables":
|
||||
# For 'variables', we merge the dictionaries. Any conflicts
|
||||
# (i.e. 'runner-attributes' has same variable key as the
|
||||
# higher level) we resolve by keeping the more specific
|
||||
# 'runner-attributes' version.
|
||||
for src_key, src_val in src_dict[runner_attr].items():
|
||||
dest_dict[runner_attr][src_key] = copy.deepcopy(src_dict[runner_attr][src_key])
|
||||
else:
|
||||
dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
|
||||
|
||||
|
||||
def _find_matching_config(spec, gitlab_ci):
|
||||
runner_attributes = {}
|
||||
overridable_attrs = [
|
||||
"image",
|
||||
"tags",
|
||||
"variables",
|
||||
"before_script",
|
||||
"script",
|
||||
"after_script",
|
||||
]
|
||||
|
||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
|
||||
matched = False
|
||||
only_first = gitlab_ci.get("match_behavior", "first") == "first"
|
||||
for ci_mapping in gitlab_ci["mappings"]:
|
||||
for match_string in ci_mapping["match"]:
|
||||
if _spec_matches(spec, match_string):
|
||||
matched = True
|
||||
if "remove-attributes" in ci_mapping:
|
||||
_remove_attributes(ci_mapping["remove-attributes"], runner_attributes)
|
||||
if "runner-attributes" in ci_mapping:
|
||||
_copy_attributes(
|
||||
overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
|
||||
)
|
||||
break
|
||||
if matched and only_first:
|
||||
break
|
||||
|
||||
return runner_attributes if matched else None
|
||||
return spec.intersects(match_string)
|
||||
|
||||
|
||||
def _format_job_needs(
|
||||
@@ -513,16 +438,28 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
|
||||
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
||||
|
||||
|
||||
def get_spec_filter_list(env, affected_pkgs):
|
||||
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
||||
"""Given a list of package names and an active/concretized
|
||||
environment, return the set of all concrete specs from the
|
||||
environment that could have been affected by changing the
|
||||
list of packages.
|
||||
|
||||
If a ``dependent_traverse_depth`` is given, it is used to limit
|
||||
upward (in the parent direction) traversal of specs of touched
|
||||
packages. E.g. if 1 is provided, then only direct dependents
|
||||
of touched package specs are traversed to produce specs that
|
||||
could have been affected by changing the package, while if 0 is
|
||||
provided, only the changed specs themselves are traversed. If ``None``
|
||||
is given, upward traversal of touched package specs is done all
|
||||
the way to the environment roots. Providing a negative number
|
||||
results in no traversals at all, yielding an empty set.
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Active concrete environment
|
||||
affected_pkgs (List[str]): Affected package names
|
||||
dependent_traverse_depth: Optional integer to limit dependent
|
||||
traversal, or None to disable the limit.
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -535,17 +472,237 @@ def get_spec_filter_list(env, affected_pkgs):
|
||||
tty.debug("All concrete environment specs:")
|
||||
for s in all_concrete_specs:
|
||||
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
||||
env_matches = [s for s in all_concrete_specs if s.name in frozenset(affected_pkgs)]
|
||||
affected_pkgs = frozenset(affected_pkgs)
|
||||
env_matches = [s for s in all_concrete_specs if s.name in affected_pkgs]
|
||||
visited = set()
|
||||
dag_hash = lambda s: s.dag_hash()
|
||||
for match in env_matches:
|
||||
for parent in match.traverse(direction="parents", key=dag_hash):
|
||||
affected_specs.update(
|
||||
parent.traverse(direction="children", visited=visited, key=dag_hash)
|
||||
)
|
||||
for depth, parent in traverse.traverse_nodes(
|
||||
env_matches, direction="parents", key=dag_hash, depth=True, order="breadth"
|
||||
):
|
||||
if dependent_traverse_depth is not None and depth > dependent_traverse_depth:
|
||||
break
|
||||
affected_specs.update(parent.traverse(direction="children", visited=visited, key=dag_hash))
|
||||
return affected_specs
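The depth limit described in the docstring above caps how far the parent-direction traversal walks. A self-contained toy version over a plain dependent map (package names are made up) behaves the same way:

```python
from collections import deque

# pkg -> direct dependents (the "parents" direction in the real traversal)
dependents = {"zlib": ["cmake", "openssl"], "openssl": ["curl"], "cmake": [], "curl": []}


def affected(pkg, max_depth=None):
    seen, queue = {pkg}, deque([(pkg, 0)])
    while queue:
        node, depth = queue.popleft()
        if max_depth is not None and depth >= max_depth:
            continue  # do not expand past the requested depth
        for parent in dependents[node]:
            if parent not in seen:
                seen.add(parent)
                queue.append((parent, depth + 1))
    return seen


print(affected("zlib", max_depth=1))  # direct dependents only
print(affected("zlib", max_depth=0))  # just the changed package
print(affected("zlib"))               # walk all the way to the roots
```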
|
||||
|
||||
|
||||
def _build_jobs(phases, staged_phases):
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
|
||||
for stage_jobs in stages:
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
release_spec = spec_record["spec"]
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
yield release_spec, release_spec_dag_hash
|
||||
|
||||
|
||||
def _noop(x):
|
||||
return x
|
||||
|
||||
|
||||
def _unpack_script(script_section, op=_noop):
|
||||
script = []
|
||||
for cmd in script_section:
|
||||
if isinstance(cmd, list):
|
||||
for subcmd in cmd:
|
||||
script.append(op(subcmd))
|
||||
else:
|
||||
script.append(op(cmd))
|
||||
|
||||
return script
|
||||
|
||||
|
||||
class SpackCI:
|
||||
"""Spack CI object used to generate intermediate representation
|
||||
used by the CI generator(s).
|
||||
"""
|
||||
|
||||
def __init__(self, ci_config, phases, staged_phases):
|
||||
"""Given the information from the ci section of the config
|
||||
and the job phases, set up the metadata needed for generating Spack
|
||||
CI IR.
|
||||
"""
|
||||
|
||||
self.ci_config = ci_config
|
||||
self.named_jobs = ["any", "build", "cleanup", "noop", "reindex", "signing"]
|
||||
|
||||
self.ir = {
|
||||
"jobs": {},
|
||||
"temporary-storage-url-prefix": self.ci_config.get(
|
||||
"temporary-storage-url-prefix", None
|
||||
),
|
||||
"enable-artifacts-buildcache": self.ci_config.get(
|
||||
"enable-artifacts-buildcache", False
|
||||
),
|
||||
"bootstrap": self.ci_config.get(
|
||||
"bootstrap", []
|
||||
), # This is deprecated and should be removed
|
||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
||||
"target": self.ci_config.get("target", "gitlab"),
|
||||
}
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
for spec, dag_hash in _build_jobs(phases, staged_phases):
|
||||
jobs[dag_hash] = self.__init_job(spec)
|
||||
|
||||
for name in self.named_jobs:
|
||||
# Initialize the remaining named jobs; "any" and "build" only modify other jobs
|
||||
if name not in ["any", "build"]:
|
||||
jobs[name] = self.__init_job("")
|
||||
|
||||
def __init_job(self, spec):
|
||||
"""Initialize job object"""
|
||||
return {"spec": spec, "attributes": {}}
|
||||
|
||||
def __is_named(self, section):
|
||||
"""Check if a pipeline-gen configuration section is for a named job,
|
||||
and if so return the name otherwise return none.
|
||||
"""
|
||||
for _name in self.named_jobs:
|
||||
keys = ["{0}-job".format(_name), "{0}-job-remove".format(_name)]
|
||||
if any([key for key in keys if key in section]):
|
||||
return _name
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def __job_name(name, suffix=""):
|
||||
"""Compute the name of a named job with appropriate suffix.
|
||||
Valid suffixes are either '-remove' or empty string or None
|
||||
"""
|
||||
assert type(name) == str
|
||||
|
||||
jname = name
|
||||
if suffix:
|
||||
jname = "{0}-job{1}".format(name, suffix)
|
||||
else:
|
||||
jname = "{0}-job".format(name)
|
||||
|
||||
return jname
|
||||
|
||||
def __apply_submapping(self, dest, spec, section):
|
||||
"""Apply submapping setion to the IR dict"""
|
||||
matched = False
|
||||
only_first = section.get("match_behavior", "first") == "first"
|
||||
|
||||
for match_attrs in reversed(section["submapping"]):
|
||||
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
|
||||
for match_string in match_attrs["match"]:
|
||||
if _spec_matches(spec, match_string):
|
||||
matched = True
|
||||
if "build-job-remove" in match_attrs:
|
||||
spack.config.remove_yaml(dest, attrs["build-job-remove"])
|
||||
if "build-job" in match_attrs:
|
||||
spack.config.merge_yaml(dest, attrs["build-job"])
|
||||
break
|
||||
if matched and only_first:
|
||||
break
|
||||
|
||||
return dest
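The control flow of __apply_submapping (a reversed walk over the submapping list that stops after the first matching entry when match_behavior is "first") can be easier to see in isolation. The sketch below is a simplified stand-in, not the real implementation: spec matching is reduced to substring checks and the Spack config merge machinery is replaced by a plain dict update; the submapping entries and spec string are made up.

def apply_submapping_sketch(spec_str, submapping, match_behavior="first"):
    applied = {}
    matched = False
    for entry in reversed(submapping):  # same reversed walk as above
        if any(pattern in spec_str for pattern in entry["match"]):
            matched = True
            applied.update(entry.get("build-job", {}))
        if matched and match_behavior == "first":
            break  # stop at the first matching entry encountered
    return applied

submapping = [
    {"match": ["gcc"], "build-job": {"tags": ["x86_64"]}},
    {"match": ["cuda"], "build-job": {"tags": ["x86_64", "gpu"]}},
]
print(apply_submapping_sketch("mypkg +cuda %gcc", submapping))  # {'tags': ['x86_64', 'gpu']}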
# Generate IR from the configs
|
||||
def generate_ir(self):
|
||||
"""Generate the IR from the Spack CI configurations."""
|
||||
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
# Implicit job defaults
|
||||
defaults = [
|
||||
{
|
||||
"build-job": {
|
||||
"script": [
|
||||
"cd {env_dir}",
|
||||
"spack env activate --without-view .",
|
||||
"spack ci rebuild",
|
||||
]
|
||||
}
|
||||
},
|
||||
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
|
||||
]
|
||||
|
||||
# Job overrides
|
||||
overrides = [
|
||||
# Reindex script
|
||||
{
|
||||
"reindex-job": {
|
||||
"script:": [
|
||||
"spack buildcache update-index --keys --mirror-url {index_target_mirror}"
|
||||
]
|
||||
}
|
||||
},
|
||||
# Cleanup script
|
||||
{
|
||||
"cleanup-job": {
|
||||
"script:": [
|
||||
"spack -d mirror destroy --mirror-url {mirror_prefix}/$CI_PIPELINE_ID"
|
||||
]
|
||||
}
|
||||
},
|
||||
# Add signing job tags
|
||||
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
|
||||
# Remove reserved tags
|
||||
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
|
||||
]
|
||||
|
||||
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
|
||||
|
||||
for section in reversed(pipeline_gen):
|
||||
name = self.__is_named(section)
|
||||
has_submapping = "submapping" in section
|
||||
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
|
||||
|
||||
if name:
|
||||
remove_job_name = self.__job_name(name, suffix="-remove")
|
||||
merge_job_name = self.__job_name(name)
|
||||
do_remove = remove_job_name in section
|
||||
do_merge = merge_job_name in section
|
||||
|
||||
def _apply_section(dest, src):
|
||||
if do_remove:
|
||||
dest = spack.config.remove_yaml(dest, src[remove_job_name])
|
||||
if do_merge:
|
||||
dest = copy.copy(spack.config.merge_yaml(dest, src[merge_job_name]))
|
||||
|
||||
if name == "build":
|
||||
# Apply attributes to all build jobs
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
_apply_section(job["attributes"], section)
|
||||
elif name == "any":
|
||||
# Apply section attributes to all jobs
|
||||
for _, job in jobs.items():
|
||||
_apply_section(job["attributes"], section)
|
||||
else:
|
||||
# Create a signing job if there is a script and the job hasn't
|
||||
# been initialized yet
|
||||
if name == "signing" and name not in jobs:
|
||||
if "signing-job" in section:
|
||||
if "script" not in section["signing-job"]:
|
||||
continue
|
||||
else:
|
||||
jobs[name] = self.__init_job("")
|
||||
# Apply attributes to named job
|
||||
_apply_section(jobs[name]["attributes"], section)
|
||||
|
||||
elif has_submapping:
|
||||
# Apply submapping sections to jobs with specs to match
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
job["attributes"] = self.__apply_submapping(
|
||||
job["attributes"], job["spec"], section
|
||||
)
|
||||
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
job["spec"] = job["spec"].name
|
||||
|
||||
return self.ir
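As a rough illustration, the IR returned by generate_ir for a pipeline with one build job might look like the dictionary below. The dag hash, spec name, tags, and script values are hypothetical, and only a subset of the top-level keys is shown.

# Rough shape of the IR produced by SpackCI.generate_ir(); values are made up.
example_ir = {
    "jobs": {
        "abcdef1234567890": {   # dag hash of the concrete spec to rebuild
            "spec": "zlib",     # reduced to the spec name at the end of generate_ir
            "attributes": {
                "tags": ["x86_64"],
                "script": [
                    "cd {env_dir}",
                    "spack env activate --without-view .",
                    "spack ci rebuild",
                ],
            },
        },
        "noop": {
            "spec": "",
            "attributes": {
                "script": ['echo "All specs already up to date, nothing to rebuild."']
            },
        },
        "cleanup": {"spec": "", "attributes": {}},
        "reindex": {"spec": "", "attributes": {}},
        "signing": {"spec": "", "attributes": {}},
    },
    "rebuild-index": True,
    "target": "gitlab",
}
print(sorted(example_ir["jobs"]))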
def generate_gitlab_ci_yaml(
|
||||
env,
|
||||
print_summary,
|
||||
@@ -595,14 +752,32 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
yaml_root = ev.config_dict(env.yaml)
|
||||
|
||||
if "gitlab-ci" not in yaml_root:
|
||||
tty.die('Environment yaml does not have "gitlab-ci" section')
|
||||
# Get the joined "ci" config with all of the current scopes resolved
|
||||
ci_config = cfg.get("ci")
|
||||
|
||||
gitlab_ci = yaml_root["gitlab-ci"]
|
||||
if not ci_config:
|
||||
tty.die('Environment yaml does not have "ci" section')
|
||||
|
||||
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
||||
# Default target is gitlab...and only target is gitlab
|
||||
if "target" in ci_config and ci_config["target"] != "gitlab":
|
||||
tty.die('Spack CI module only generates target "gitlab"')
|
||||
|
||||
cdash_config = cfg.get("cdash")
|
||||
cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
|
||||
build_group = cdash_handler.build_group if cdash_handler else None
|
||||
|
||||
dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
|
||||
if dependent_depth is not None:
|
||||
try:
|
||||
dependent_depth = int(dependent_depth)
|
||||
except (TypeError, ValueError):
|
||||
tty.warn(
|
||||
f"Unrecognized value ({dependent_depth}) "
|
||||
"provided for SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH, "
|
||||
"ignoring it."
|
||||
)
|
||||
dependent_depth = None
|
||||
|
||||
prune_untouched_packages = False
|
||||
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
|
||||
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
|
||||
@@ -618,7 +793,9 @@ def generate_gitlab_ci_yaml(
|
||||
tty.debug("affected pkgs:")
|
||||
for p in affected_pkgs:
|
||||
tty.debug(" {0}".format(p))
|
||||
affected_specs = get_spec_filter_list(env, affected_pkgs)
|
||||
affected_specs = get_spec_filter_list(
|
||||
env, affected_pkgs, dependent_traverse_depth=dependent_depth
|
||||
)
|
||||
tty.debug("all affected specs:")
|
||||
for s in affected_specs:
|
||||
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
||||
@@ -660,53 +837,39 @@ def generate_gitlab_ci_yaml(
|
||||
# trying to build.
|
||||
broken_specs_url = ""
|
||||
known_broken_specs_encountered = []
|
||||
if "broken-specs-url" in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci["broken-specs-url"]
|
||||
if "broken-specs-url" in ci_config:
|
||||
broken_specs_url = ci_config["broken-specs-url"]
|
||||
|
||||
enable_artifacts_buildcache = False
|
||||
if "enable-artifacts-buildcache" in gitlab_ci:
|
||||
enable_artifacts_buildcache = gitlab_ci["enable-artifacts-buildcache"]
|
||||
if "enable-artifacts-buildcache" in ci_config:
|
||||
enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
|
||||
|
||||
rebuild_index_enabled = True
|
||||
if "rebuild-index" in gitlab_ci and gitlab_ci["rebuild-index"] is False:
|
||||
if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
|
||||
rebuild_index_enabled = False
|
||||
|
||||
temp_storage_url_prefix = None
|
||||
if "temporary-storage-url-prefix" in gitlab_ci:
|
||||
temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
|
||||
bootstrap_specs = []
|
||||
phases = []
|
||||
if "bootstrap" in gitlab_ci:
|
||||
for phase in gitlab_ci["bootstrap"]:
|
||||
if "bootstrap" in ci_config:
|
||||
for phase in ci_config["bootstrap"]:
|
||||
try:
|
||||
phase_name = phase.get("name")
|
||||
strip_compilers = phase.get("compiler-agnostic")
|
||||
except AttributeError:
|
||||
phase_name = phase
|
||||
strip_compilers = False
|
||||
phases.append(
|
||||
{
|
||||
"name": phase_name,
|
||||
"strip-compilers": strip_compilers,
|
||||
}
|
||||
)
|
||||
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
|
||||
|
||||
for bs in env.spec_lists[phase_name]:
|
||||
bootstrap_specs.append(
|
||||
{
|
||||
"spec": bs,
|
||||
"phase-name": phase_name,
|
||||
"strip-compilers": strip_compilers,
|
||||
}
|
||||
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
|
||||
)
|
||||
|
||||
phases.append(
|
||||
{
|
||||
"name": "specs",
|
||||
"strip-compilers": False,
|
||||
}
|
||||
)
|
||||
phases.append({"name": "specs", "strip-compilers": False})
|
||||
|
||||
# If a remote mirror override (alternate buildcache destination) was
|
||||
# specified, add it here in case it has already built hashes we might
|
||||
@@ -757,6 +920,27 @@ def generate_gitlab_ci_yaml(
|
||||
shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
|
||||
shutil.copyfile(env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
|
||||
|
||||
with open(env.manifest_path, "r") as env_fd:
|
||||
env_yaml_root = syaml.load(env_fd)
|
||||
# Add config scopes to environment
|
||||
env_includes = env_yaml_root["spack"].get("include", [])
|
||||
cli_scopes = [
|
||||
os.path.abspath(s.path)
|
||||
for s in cfg.scopes().values()
|
||||
if type(s) == cfg.ImmutableConfigScope
|
||||
and s.path not in env_includes
|
||||
and os.path.exists(s.path)
|
||||
]
|
||||
include_scopes = []
|
||||
for scope in cli_scopes:
|
||||
if scope not in include_scopes and scope not in env_includes:
|
||||
include_scopes.insert(0, scope)
|
||||
env_includes.extend(include_scopes)
|
||||
env_yaml_root["spack"]["include"] = env_includes
|
||||
|
||||
with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
|
||||
fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))
|
||||
|
||||
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
|
||||
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
|
||||
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
|
||||
@@ -768,7 +952,7 @@ def generate_gitlab_ci_yaml(
|
||||
# generation job and the rebuild jobs. This can happen when gitlab
|
||||
# checks out the project into a runner-specific directory, for example,
|
||||
# and different runners are picked for generate and rebuild jobs.
|
||||
ci_project_dir = os.environ.get("CI_PROJECT_DIR")
|
||||
ci_project_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
|
||||
rel_artifacts_root = os.path.relpath(pipeline_artifacts_dir, ci_project_dir)
|
||||
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
|
||||
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
|
||||
@@ -782,7 +966,7 @@ def generate_gitlab_ci_yaml(
|
||||
try:
|
||||
bindist.binary_index.update()
|
||||
except bindist.FetchCacheError as e:
|
||||
tty.error(e)
|
||||
tty.warn(e)
|
||||
|
||||
staged_phases = {}
|
||||
try:
|
||||
@@ -839,6 +1023,9 @@ def generate_gitlab_ci_yaml(
|
||||
else:
|
||||
broken_spec_urls = web_util.list_url(broken_specs_url)
|
||||
|
||||
spack_ci = SpackCI(ci_config, phases, staged_phases)
|
||||
spack_ci_ir = spack_ci.generate_ir()
|
||||
|
||||
before_script, after_script = None, None
|
||||
for phase in phases:
|
||||
phase_name = phase["name"]
|
||||
@@ -866,7 +1053,7 @@ def generate_gitlab_ci_yaml(
|
||||
spec_record["needs_rebuild"] = False
|
||||
continue
|
||||
|
||||
runner_attribs = _find_matching_config(release_spec, gitlab_ci)
|
||||
runner_attribs = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
|
||||
|
||||
if not runner_attribs:
|
||||
tty.warn("No match found for {0}, skipping it".format(release_spec))
|
||||
@@ -897,23 +1084,21 @@ def generate_gitlab_ci_yaml(
|
||||
except AttributeError:
|
||||
image_name = build_image
|
||||
|
||||
job_script = ["spack env activate --without-view ."]
|
||||
if "script" not in runner_attribs:
|
||||
raise AttributeError
|
||||
|
||||
if artifacts_root:
|
||||
job_script.insert(0, "cd {0}".format(concrete_env_dir))
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", concrete_env_dir)
|
||||
|
||||
job_script.extend(["spack ci rebuild"])
|
||||
|
||||
if "script" in runner_attribs:
|
||||
job_script = [s for s in runner_attribs["script"]]
|
||||
job_script = _unpack_script(runner_attribs["script"], op=main_script_replacements)
|
||||
|
||||
before_script = None
|
||||
if "before_script" in runner_attribs:
|
||||
before_script = [s for s in runner_attribs["before_script"]]
|
||||
before_script = _unpack_script(runner_attribs["before_script"])
|
||||
|
||||
after_script = None
|
||||
if "after_script" in runner_attribs:
|
||||
after_script = [s for s in runner_attribs["after_script"]]
|
||||
after_script = _unpack_script(runner_attribs["after_script"])
|
||||
|
||||
osname = str(release_spec.architecture)
|
||||
job_name = get_job_name(
|
||||
@@ -975,7 +1160,7 @@ def generate_gitlab_ci_yaml(
|
||||
bs_arch = c_spec.architecture
|
||||
bs_arch_family = bs_arch.target.microarchitecture.family
|
||||
if (
|
||||
c_spec.satisfies(compiler_pkg_spec)
|
||||
c_spec.intersects(compiler_pkg_spec)
|
||||
and bs_arch_family == spec_arch_family
|
||||
):
|
||||
# We found the bootstrap compiler this release spec
|
||||
@@ -1109,15 +1294,9 @@ def generate_gitlab_ci_yaml(
|
||||
"variables": variables,
|
||||
"script": job_script,
|
||||
"tags": tags,
|
||||
"artifacts": {
|
||||
"paths": artifact_paths,
|
||||
"when": "always",
|
||||
},
|
||||
"artifacts": {"paths": artifact_paths, "when": "always"},
|
||||
"needs": sorted(job_dependencies, key=lambda d: d["job"]),
|
||||
"retry": {
|
||||
"max": 2,
|
||||
"when": JOB_RETRY_CONDITIONS,
|
||||
},
|
||||
"retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
|
||||
"interruptible": True,
|
||||
}
|
||||
|
||||
@@ -1135,10 +1314,7 @@ def generate_gitlab_ci_yaml(
|
||||
if image_name:
|
||||
job_object["image"] = image_name
|
||||
if image_entry is not None:
|
||||
job_object["image"] = {
|
||||
"name": image_name,
|
||||
"entrypoint": image_entry,
|
||||
}
|
||||
job_object["image"] = {"name": image_name, "entrypoint": image_entry}
|
||||
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
@@ -1166,26 +1342,9 @@ def generate_gitlab_ci_yaml(
|
||||
else:
|
||||
tty.warn("Unable to populate buildgroup without CDash credentials")
|
||||
|
||||
service_job_config = None
|
||||
if "service-job-attributes" in gitlab_ci:
|
||||
service_job_config = gitlab_ci["service-job-attributes"]
|
||||
|
||||
default_attrs = [
|
||||
"image",
|
||||
"tags",
|
||||
"variables",
|
||||
"before_script",
|
||||
# 'script',
|
||||
"after_script",
|
||||
]
|
||||
|
||||
service_job_retries = {
|
||||
"max": 2,
|
||||
"when": [
|
||||
"runner_system_failure",
|
||||
"stuck_or_timeout_failure",
|
||||
"script_failure",
|
||||
],
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
if job_id > 0:
|
||||
@@ -1194,55 +1353,29 @@ def generate_gitlab_ci_yaml(
|
||||
# schedule a job to clean up the temporary storage location
|
||||
# associated with this pipeline.
|
||||
stage_names.append("cleanup-temp-storage")
|
||||
cleanup_job = {}
|
||||
|
||||
if service_job_config:
|
||||
_copy_attributes(default_attrs, service_job_config, cleanup_job)
|
||||
|
||||
if "tags" in cleanup_job:
|
||||
service_tags = _remove_reserved_tags(cleanup_job["tags"])
|
||||
cleanup_job["tags"] = service_tags
|
||||
cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])
|
||||
|
||||
cleanup_job["stage"] = "cleanup-temp-storage"
|
||||
cleanup_job["script"] = [
|
||||
"spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID".format(
|
||||
temp_storage_url_prefix
|
||||
)
|
||||
]
|
||||
cleanup_job["when"] = "always"
|
||||
cleanup_job["retry"] = service_job_retries
|
||||
cleanup_job["interruptible"] = True
|
||||
|
||||
cleanup_job["script"] = _unpack_script(
|
||||
cleanup_job["script"],
|
||||
op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
|
||||
)
|
||||
|
||||
output_object["cleanup"] = cleanup_job
|
||||
|
||||
if (
|
||||
"signing-job-attributes" in gitlab_ci
|
||||
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
|
||||
and spack_pipeline_type == "spack_protected_branch"
|
||||
):
|
||||
# External signing: generate a job to check and sign binary pkgs
|
||||
stage_names.append("stage-sign-pkgs")
|
||||
signing_job_config = gitlab_ci["signing-job-attributes"]
|
||||
signing_job = {}
|
||||
signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]
|
||||
|
||||
signing_job_attrs_to_copy = [
|
||||
"image",
|
||||
"tags",
|
||||
"variables",
|
||||
"before_script",
|
||||
"script",
|
||||
"after_script",
|
||||
]
|
||||
|
||||
_copy_attributes(signing_job_attrs_to_copy, signing_job_config, signing_job)
|
||||
|
||||
signing_job_tags = []
|
||||
if "tags" in signing_job:
|
||||
signing_job_tags = _remove_reserved_tags(signing_job["tags"])
|
||||
|
||||
for tag in ["aws", "protected", "notary"]:
|
||||
if tag not in signing_job_tags:
|
||||
signing_job_tags.append(tag)
|
||||
signing_job["tags"] = signing_job_tags
|
||||
signing_job["script"] = _unpack_script(signing_job["script"])
|
||||
|
||||
signing_job["stage"] = "stage-sign-pkgs"
|
||||
signing_job["when"] = "always"
|
||||
@@ -1254,23 +1387,17 @@ def generate_gitlab_ci_yaml(
|
||||
if rebuild_index_enabled:
|
||||
# Add a final job to regenerate the index
|
||||
stage_names.append("stage-rebuild-index")
|
||||
final_job = {}
|
||||
|
||||
if service_job_config:
|
||||
_copy_attributes(default_attrs, service_job_config, final_job)
|
||||
|
||||
if "tags" in final_job:
|
||||
service_tags = _remove_reserved_tags(final_job["tags"])
|
||||
final_job["tags"] = service_tags
|
||||
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
|
||||
|
||||
index_target_mirror = mirror_urls[0]
|
||||
if remote_mirror_override:
|
||||
index_target_mirror = remote_mirror_override
|
||||
|
||||
final_job["stage"] = "stage-rebuild-index"
|
||||
final_job["script"] = [
|
||||
"spack buildcache update-index --keys --mirror-url {0}".format(index_target_mirror)
|
||||
]
|
||||
final_job["script"] = _unpack_script(
|
||||
final_job["script"],
|
||||
op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
|
||||
)
|
||||
|
||||
final_job["when"] = "always"
|
||||
final_job["retry"] = service_job_retries
|
||||
final_job["interruptible"] = True
|
||||
@@ -1351,15 +1478,7 @@ def generate_gitlab_ci_yaml(
|
||||
else:
|
||||
# No jobs were generated
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
noop_job = {}
|
||||
|
||||
if service_job_config:
|
||||
_copy_attributes(default_attrs, service_job_config, noop_job)
|
||||
|
||||
if "script" not in noop_job:
|
||||
noop_job["script"] = [
|
||||
'echo "All specs already up to date, nothing to rebuild."',
|
||||
]
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
|
||||
noop_job["retry"] = service_job_retries
|
||||
|
||||
@@ -1373,7 +1492,7 @@ def generate_gitlab_ci_yaml(
|
||||
sys.exit(1)
|
||||
|
||||
with open(output_file, "w") as outf:
|
||||
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
|
||||
outf.write(syaml.dump(sorted_output, default_flow_style=True))
|
||||
|
||||
|
||||
def _url_encode_string(input_string):
|
||||
@@ -1489,9 +1608,8 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
||||
hashes = env.all_hashes() if env else None
|
||||
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
|
||||
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
|
||||
spec_kwargs = {"include_root": True, "include_dependencies": False}
|
||||
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
|
||||
bindist.push(matches, push_url, spec_kwargs, **kwargs)
|
||||
bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
|
||||
|
||||
|
||||
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
@@ -1557,7 +1675,7 @@ def copy_files_to_artifacts(src, artifacts_dir):
|
||||
msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
|
||||
src, artifacts_dir, str(err)
|
||||
)
|
||||
tty.error(msg)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
@@ -1620,9 +1738,7 @@ def download_and_extract_artifacts(url, work_dir):
|
||||
"""
|
||||
tty.msg("Fetching artifacts from: {0}\n".format(url))
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/zip",
|
||||
}
|
||||
headers = {"Content-Type": "application/zip"}
|
||||
|
||||
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
|
||||
if token:
|
||||
@@ -1779,6 +1895,7 @@ def reproduce_ci_job(url, work_dir):
|
||||
function is a set of printed instructions for running docker and then
|
||||
commands to run to reproduce the build once inside the container.
|
||||
"""
|
||||
work_dir = os.path.realpath(work_dir)
|
||||
download_and_extract_artifacts(url, work_dir)
|
||||
|
||||
lock_file = fs.find(work_dir, "spack.lock")[0]
|
||||
@@ -1943,7 +2060,9 @@ def reproduce_ci_job(url, work_dir):
|
||||
if job_image:
|
||||
inst_list.append("\nRun the following command:\n\n")
|
||||
inst_list.append(
|
||||
" $ docker run --rm -v {0}:{1} -ti {2}\n".format(work_dir, mount_as_dir, job_image)
|
||||
" $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
|
||||
work_dir, mount_as_dir, job_image
|
||||
)
|
||||
)
|
||||
inst_list.append("\nOnce inside the container:\n\n")
|
||||
else:
|
||||
@@ -1994,13 +2113,16 @@ def process_command(name, commands, repro_dir):
|
||||
# Create a string [command 1] && [command 2] && ... && [command n] with commands
|
||||
# quoted using double quotes.
|
||||
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
|
||||
full_command = " && ".join(map(args_to_string, commands))
|
||||
full_command = " \n ".join(map(args_to_string, commands))
|
||||
|
||||
# Write the command to a shell script
|
||||
script = "{0}.sh".format(name)
|
||||
with open(script, "w") as fd:
|
||||
fd.write("#!/bin/sh\n\n")
|
||||
fd.write("\n# spack {0} command\n".format(name))
|
||||
fd.write("set -e\n")
|
||||
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
|
||||
fd.write("set -x\n")
|
||||
fd.write(full_command)
|
||||
fd.write("\n")
|
|
||||
with open(file_path, "w") as fd:
|
||||
fd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
file_path,
|
||||
url,
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain"},
|
||||
file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
|
||||
)
|
||||
except Exception as err:
|
||||
# If there is an S3 error (e.g., access denied or connection
|
||||
@@ -2162,14 +2281,7 @@ def run_standalone_tests(**kwargs):
|
||||
tty.error("Reproduction directory is required for stand-alone tests")
|
||||
return
|
||||
|
||||
test_args = [
|
||||
"spack",
|
||||
"--color=always",
|
||||
"--backtrace",
|
||||
"--verbose",
|
||||
"test",
|
||||
"run",
|
||||
]
|
||||
test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
|
||||
if fail_fast:
|
||||
test_args.append("--fail-fast")
|
||||
|
||||
@@ -2319,19 +2431,9 @@ def populate_buildgroup(self, job_names):
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
self.build_group,
|
||||
"Daily",
|
||||
)
|
||||
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||
group_id = self.create_buildgroup(
|
||||
opener,
|
||||
headers,
|
||||
url,
|
||||
"Latest {0}".format(self.build_group),
|
||||
"Latest",
|
||||
opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
@@ -2341,13 +2443,9 @@ def populate_buildgroup(self, job_names):
|
||||
|
||||
data = {
|
||||
"dynamiclist": [
|
||||
{
|
||||
"match": name,
|
||||
"parentgroupid": parent_group_id,
|
||||
"site": self.site,
|
||||
}
|
||||
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
||||
for name in job_names
|
||||
],
|
||||
]
|
||||
}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
@@ -43,7 +43,6 @@ def matches(obj, proto):
|
||||
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
||||
|
||||
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||
|
||||
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
||||
return False
|
||||
|
||||
|
||||
@@ -161,9 +161,7 @@ class _UnquotedFlags(object):
|
||||
"""
|
||||
|
||||
flags_arg_pattern = re.compile(
|
||||
r'^({0})=([^\'"].*)$'.format(
|
||||
"|".join(spack.spec.FlagMap.valid_compiler_flags()),
|
||||
)
|
||||
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
|
||||
)
|
||||
|
||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||
@@ -227,7 +225,6 @@ def parse_specs(args, **kwargs):
|
||||
return specs
|
||||
|
||||
except spack.error.SpecError as e:
|
||||
|
||||
msg = e.message
|
||||
if e.long_message:
|
||||
msg += e.long_message
|
||||
|
||||
@@ -53,7 +53,6 @@ def packages(parser, args):
|
||||
|
||||
|
||||
def packages_https(parser, args):
|
||||
|
||||
# Since packages takes a long time, --all is required without name
|
||||
if not args.check_all and not args.name:
|
||||
tty.die("Please specify one or more packages to audit, or --all.")
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
import spack.cmd.common.env_utility as env_utility
|
||||
|
||||
description = (
|
||||
"run a command in a spec's install environment, " "or dump its environment to screen or file"
|
||||
"run a command in a spec's install environment, or dump its environment to screen or file"
|
||||
)
|
||||
section = "build"
|
||||
level = "long"
|
||||
|
||||
@@ -103,9 +103,7 @@ def setup_parser(subparser):
|
||||
help="Regenerate buildcache index after building package(s)",
|
||||
)
|
||||
create.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="Create buildcache entry for spec from json or yaml file",
|
||||
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
|
||||
)
|
||||
create.add_argument(
|
||||
"--only",
|
||||
@@ -402,7 +400,7 @@ def _matching_specs(specs, spec_file):
|
||||
return spack.store.find(constraints, hashes=hashes)
|
||||
|
||||
if env:
|
||||
return [env.specs_by_hash[h] for h in env.concretized_order]
|
||||
return [concrete for _, concrete in env.concretized_specs()]
|
||||
|
||||
tty.die(
|
||||
"build cache file creation requires at least one"
|
||||
@@ -461,10 +459,6 @@ def create_fn(args):
|
||||
|
||||
msg = "Pushing binary packages to {0}/build_cache".format(url)
|
||||
tty.msg(msg)
|
||||
specs_kwargs = {
|
||||
"include_root": "package" in args.things_to_install,
|
||||
"include_dependencies": "dependencies" in args.things_to_install,
|
||||
}
|
||||
kwargs = {
|
||||
"key": args.key,
|
||||
"force": args.force,
|
||||
@@ -473,7 +467,13 @@ def create_fn(args):
|
||||
"allow_root": args.allow_root,
|
||||
"regenerate_index": args.rebuild_index,
|
||||
}
|
||||
bindist.push(matches, url, specs_kwargs, **kwargs)
|
||||
bindist.push(
|
||||
matches,
|
||||
url,
|
||||
include_root="package" in args.things_to_install,
|
||||
include_dependencies="dependencies" in args.things_to_install,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def install_fn(args):
|
||||
@@ -498,11 +498,11 @@ def list_fn(args):
|
||||
|
||||
if not args.allarch:
|
||||
arch = spack.spec.Spec.default_arch()
|
||||
specs = [s for s in specs if s.satisfies(arch)]
|
||||
specs = [s for s in specs if s.intersects(arch)]
|
||||
|
||||
if args.specs:
|
||||
constraints = set(args.specs)
|
||||
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
|
||||
specs = [s for s in specs if any(s.intersects(c) for c in constraints)]
|
||||
if sys.stdout.isatty():
|
||||
builds = len(specs)
|
||||
tty.msg("%s." % plural(builds, "cached build"))
|
||||
|
||||
@@ -20,9 +20,7 @@ def setup_parser(subparser):
|
||||
help="name of the list to remove specs from",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--match-spec",
|
||||
dest="match_spec",
|
||||
help="if name is ambiguous, supply a spec to match",
|
||||
"--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-a",
|
||||
|
||||
@@ -33,12 +33,6 @@ def deindent(desc):
|
||||
return desc.replace(" ", "")
|
||||
|
||||
|
||||
def get_env_var(variable_name):
|
||||
if variable_name in os.environ:
|
||||
return os.environ.get(variable_name)
|
||||
return None
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
setup_parser.parser = subparser
|
||||
subparsers = subparser.add_subparsers(help="CI sub-commands")
|
||||
@@ -255,10 +249,9 @@ def ci_rebuild(args):
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
# to do.
|
||||
yaml_root = ev.config_dict(env.yaml)
|
||||
gitlab_ci = yaml_root["gitlab-ci"] if "gitlab-ci" in yaml_root else None
|
||||
if not gitlab_ci:
|
||||
tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
|
||||
ci_config = cfg.get("ci")
|
||||
if not ci_config:
|
||||
tty.die("spack ci rebuild requires an env containing ci cfg")
|
||||
|
||||
tty.msg(
|
||||
"SPACK_BUILDCACHE_DESTINATION={0}".format(
|
||||
@@ -269,27 +262,27 @@ def ci_rebuild(args):
|
||||
# Grab the environment variables we need. These either come from the
|
||||
# pipeline generation step ("spack ci generate"), where they were written
|
||||
# out as variables, or else provided by GitLab itself.
|
||||
pipeline_artifacts_dir = get_env_var("SPACK_ARTIFACTS_ROOT")
|
||||
job_log_dir = get_env_var("SPACK_JOB_LOG_DIR")
|
||||
job_test_dir = get_env_var("SPACK_JOB_TEST_DIR")
|
||||
repro_dir = get_env_var("SPACK_JOB_REPRO_DIR")
|
||||
local_mirror_dir = get_env_var("SPACK_LOCAL_MIRROR_DIR")
|
||||
concrete_env_dir = get_env_var("SPACK_CONCRETE_ENV_DIR")
|
||||
ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
|
||||
ci_job_name = get_env_var("CI_JOB_NAME")
|
||||
signing_key = get_env_var("SPACK_SIGNING_KEY")
|
||||
job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
|
||||
job_spec_dag_hash = get_env_var("SPACK_JOB_SPEC_DAG_HASH")
|
||||
compiler_action = get_env_var("SPACK_COMPILER_ACTION")
|
||||
spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
|
||||
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
|
||||
spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
|
||||
shared_pr_mirror_url = get_env_var("SPACK_CI_SHARED_PR_MIRROR_URL")
|
||||
rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
|
||||
pipeline_artifacts_dir = os.environ.get("SPACK_ARTIFACTS_ROOT")
|
||||
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
|
||||
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
|
||||
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
|
||||
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
|
||||
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
|
||||
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
|
||||
ci_job_name = os.environ.get("CI_JOB_NAME")
|
||||
signing_key = os.environ.get("SPACK_SIGNING_KEY")
|
||||
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
|
||||
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
|
||||
compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
|
||||
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
|
||||
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
|
||||
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
|
||||
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
|
||||
|
||||
# Construct absolute paths relative to current $CI_PROJECT_DIR
|
||||
ci_project_dir = get_env_var("CI_PROJECT_DIR")
|
||||
ci_project_dir = os.environ.get("CI_PROJECT_DIR")
|
||||
pipeline_artifacts_dir = os.path.join(ci_project_dir, pipeline_artifacts_dir)
|
||||
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
|
||||
job_test_dir = os.path.join(ci_project_dir, job_test_dir)
|
||||
@@ -306,8 +299,10 @@ def ci_rebuild(args):
|
||||
# Query the environment manifest to find out whether we're reporting to a
|
||||
# CDash instance, and if so, gather some information from the manifest to
|
||||
# support that task.
|
||||
cdash_handler = spack_ci.CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
||||
if cdash_handler:
|
||||
cdash_config = cfg.get("cdash")
|
||||
cdash_handler = None
|
||||
if "build-group" in cdash_config:
|
||||
cdash_handler = spack_ci.CDashHandler(cdash_config)
|
||||
tty.debug("cdash url = {0}".format(cdash_handler.url))
|
||||
tty.debug("cdash project = {0}".format(cdash_handler.project))
|
||||
tty.debug("cdash project_enc = {0}".format(cdash_handler.project_enc))
|
||||
@@ -340,13 +335,13 @@ def ci_rebuild(args):
|
||||
pipeline_mirror_url = None
|
||||
|
||||
temp_storage_url_prefix = None
|
||||
if "temporary-storage-url-prefix" in gitlab_ci:
|
||||
temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
|
||||
|
||||
enable_artifacts_mirror = False
|
||||
if "enable-artifacts-buildcache" in gitlab_ci:
|
||||
enable_artifacts_mirror = gitlab_ci["enable-artifacts-buildcache"]
|
||||
if "enable-artifacts-buildcache" in ci_config:
|
||||
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
|
||||
if enable_artifacts_mirror or (
|
||||
spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
|
||||
):
|
||||
@@ -530,39 +525,28 @@ def ci_rebuild(args):
|
||||
if not verify_binaries:
|
||||
install_args.append("--no-check-signature")
|
||||
|
||||
if cdash_handler:
|
||||
# Add additional arguments to `spack install` for CDash reporting.
|
||||
install_args.extend(cdash_handler.args())
|
||||
|
||||
slash_hash = "/{}".format(job_spec.dag_hash())
|
||||
|
||||
# Arguments when installing dependencies from cache
|
||||
deps_install_args = install_args
|
||||
|
||||
# Arguments when installing the root from sources
|
||||
root_install_args = install_args + [
|
||||
"--keep-stage",
|
||||
"--only=package",
|
||||
"--use-buildcache=package:never,dependencies:only",
|
||||
slash_hash,
|
||||
]
|
||||
if cdash_handler:
|
||||
# Add additional arguments to `spack install` for CDash reporting.
|
||||
root_install_args.extend(cdash_handler.args())
|
||||
root_install_args.append(slash_hash)
|
||||
|
||||
# ["x", "y"] -> "'x' 'y'"
|
||||
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
|
||||
|
||||
commands = [
|
||||
# apparently there's a race when spack bootstraps? do it up front once
|
||||
[
|
||||
SPACK_COMMAND,
|
||||
"-e",
|
||||
env.path,
|
||||
"bootstrap",
|
||||
"now",
|
||||
],
|
||||
[
|
||||
SPACK_COMMAND,
|
||||
"-e",
|
||||
env.path,
|
||||
"config",
|
||||
"add",
|
||||
"config:db_lock_timeout:120", # 2 minutes for processes to fight for a db lock
|
||||
],
|
||||
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
|
||||
[
|
||||
SPACK_COMMAND,
|
||||
"-e",
|
||||
@@ -604,8 +588,8 @@ def ci_rebuild(args):
|
||||
# avoid wasting compute cycles attempting to build those hashes.
|
||||
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
|
||||
tty.debug("Install failed on develop")
|
||||
if "broken-specs-url" in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci["broken-specs-url"]
|
||||
if "broken-specs-url" in ci_config:
|
||||
broken_specs_url = ci_config["broken-specs-url"]
|
||||
dev_fail_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
|
||||
tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path))
|
||||
@@ -613,8 +597,8 @@ def ci_rebuild(args):
|
||||
broken_spec_path,
|
||||
job_spec_pkg_name,
|
||||
spack_ci_stack_name,
|
||||
get_env_var("CI_JOB_URL"),
|
||||
get_env_var("CI_PIPELINE_URL"),
|
||||
os.environ.get("CI_JOB_URL"),
|
||||
os.environ.get("CI_PIPELINE_URL"),
|
||||
job_spec.to_dict(hash=ht.dag_hash),
|
||||
)
|
||||
|
||||
@@ -626,17 +610,14 @@ def ci_rebuild(args):
|
||||
# the package, run them and copy the output. Failures of any kind should
|
||||
# *not* terminate the build process or preclude creating the build cache.
|
||||
broken_tests = (
|
||||
"broken-tests-packages" in gitlab_ci
|
||||
and job_spec.name in gitlab_ci["broken-tests-packages"]
|
||||
"broken-tests-packages" in ci_config
|
||||
and job_spec.name in ci_config["broken-tests-packages"]
|
||||
)
|
||||
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
|
||||
if args.tests and broken_tests:
|
||||
tty.warn(
|
||||
"Unable to run stand-alone tests since listed in "
|
||||
"gitlab-ci's 'broken-tests-packages'"
|
||||
)
|
||||
tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
|
||||
if cdash_handler:
|
||||
msg = "Package is listed in gitlab-ci's broken-tests-packages"
|
||||
msg = "Package is listed in ci's broken-tests-packages"
|
||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||
cdash_handler.copy_test_results(reports_dir, job_test_dir)
|
||||
elif args.tests:
|
||||
@@ -699,8 +680,8 @@ def ci_rebuild(args):
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
# on the broken-specs list. If so, remove it.
|
||||
if spack_is_develop_pipeline and "broken-specs-url" in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci["broken-specs-url"]
|
||||
if spack_is_develop_pipeline and "broken-specs-url" in ci_config:
|
||||
broken_specs_url = ci_config["broken-specs-url"]
|
||||
just_built_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
|
||||
if web_util.url_exists(broken_spec_path):
|
||||
@@ -717,9 +698,9 @@ def ci_rebuild(args):
|
||||
else:
|
||||
tty.debug("spack install exited non-zero, will not create buildcache")
|
||||
|
||||
api_root_url = get_env_var("CI_API_V4_URL")
|
||||
ci_project_id = get_env_var("CI_PROJECT_ID")
|
||||
ci_job_id = get_env_var("CI_JOB_ID")
|
||||
api_root_url = os.environ.get("CI_API_V4_URL")
|
||||
ci_project_id = os.environ.get("CI_PROJECT_ID")
|
||||
ci_job_id = os.environ.get("CI_JOB_ID")
|
||||
|
||||
repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
|
||||
api_root_url, ci_project_id, ci_job_id
|
||||
|
||||
@@ -13,11 +13,7 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.argparsewriter import (
|
||||
ArgparseCompletionWriter,
|
||||
ArgparseRstWriter,
|
||||
ArgparseWriter,
|
||||
)
|
||||
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd
|
||||
@@ -42,7 +38,7 @@
|
||||
"format": "bash",
|
||||
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
|
||||
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -514,7 +514,15 @@ def add_concretizer_args(subparser):
|
||||
dest="concretizer:reuse",
|
||||
const=True,
|
||||
default=None,
|
||||
help="reuse installed dependencies/buildcaches when possible",
|
||||
help="reuse installed packages/buildcaches when possible",
|
||||
)
|
||||
subgroup.add_argument(
|
||||
"--reuse-deps",
|
||||
action=ConfigSetAction,
|
||||
dest="concretizer:reuse",
|
||||
const="dependencies",
|
||||
default=None,
|
||||
help="reuse installed dependencies only",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -12,7 +12,11 @@
|
||||
import spack.build_environment as build_environment
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.error
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack import traverse
|
||||
from spack.util.environment import dump_environment, pickle_environment
|
||||
|
||||
|
||||
@@ -38,6 +42,41 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
|
||||
class AreDepsInstalledVisitor:
|
||||
def __init__(self, context="build"):
|
||||
if context not in ("build", "test"):
|
||||
raise ValueError("context can only be build or test")
|
||||
|
||||
if context == "build":
|
||||
self.direct_deps = ("build", "link", "run")
|
||||
else:
|
||||
self.direct_deps = ("build", "test", "link", "run")
|
||||
|
||||
self.has_uninstalled_deps = False
|
||||
|
||||
def accept(self, item):
|
||||
# The root may be installed or uninstalled.
|
||||
if item.depth == 0:
|
||||
return True
|
||||
|
||||
# Early exit after we've seen an uninstalled dep.
|
||||
if self.has_uninstalled_deps:
|
||||
return False
|
||||
|
||||
spec = item.edge.spec
|
||||
if not spec.external and not spec.installed:
|
||||
self.has_uninstalled_deps = True
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def neighbors(self, item):
|
||||
# Direct deps: follow build & test edges.
|
||||
# Transitive deps: follow link / run.
|
||||
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
|
||||
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
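A self-contained sketch of the accept/neighbors visitor protocol that AreDepsInstalledVisitor implements. Spack's traverse module drives the real visitor over spec edges; here a toy breadth-first driver walks a plain dict graph, and the package names and install states are made up.

from collections import deque, namedtuple

Item = namedtuple("Item", ["node", "depth"])

def bfs_with_visitor(roots, visitor):
    """Minimal breadth-first driver for an accept/neighbors visitor."""
    queue = deque(Item(r, 0) for r in roots)
    while queue:
        item = queue.popleft()
        if not visitor.accept(item):
            continue
        queue.extend(Item(n, item.depth + 1) for n in visitor.neighbors(item))

class ToyDepsInstalledVisitor:
    def __init__(self, graph, installed):
        self.graph, self.installed = graph, installed
        self.has_uninstalled_deps = False

    def accept(self, item):
        if item.depth == 0:              # the root itself may be uninstalled
            return True
        if self.has_uninstalled_deps:    # early exit once one dep is missing
            return False
        if item.node not in self.installed:
            self.has_uninstalled_deps = True
            return False
        return True

    def neighbors(self, item):
        return self.graph.get(item.node, ())

graph = {"mypkg": ["cmake", "zlib"], "zlib": []}
visitor = ToyDepsInstalledVisitor(graph, installed={"zlib"})
bfs_with_visitor(["mypkg"], visitor)
print(visitor.has_uninstalled_deps)  # True: cmake is not installed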
def emulate_env_utility(cmd_name, context, args):
|
||||
if not args.spec:
|
||||
tty.die("spack %s requires a spec." % cmd_name)
|
||||
@@ -65,6 +104,27 @@ def emulate_env_utility(cmd_name, context, args):
|
||||
|
||||
spec = spack.cmd.matching_spec_from_env(spec)
|
||||
|
||||
# Require that dependencies are installed.
|
||||
visitor = AreDepsInstalledVisitor(context=context)
|
||||
|
||||
# Mass install check needs read transaction.
|
||||
with spack.store.db.read_transaction():
|
||||
traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))
|
||||
|
||||
if visitor.has_uninstalled_deps:
|
||||
raise spack.error.SpackError(
|
||||
f"Not all dependencies of {spec.name} are installed. "
|
||||
f"Cannot setup {context} environment:",
|
||||
spec.tree(
|
||||
status_fn=spack.spec.Spec.install_status,
|
||||
hashlen=7,
|
||||
hashes=True,
|
||||
# This shows more than necessary, but we cannot dynamically change deptypes
|
||||
# in Spec.tree(...).
|
||||
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
|
||||
),
|
||||
)
|
||||
|
||||
build_environment.setup_package(spec.package, args.dirty, context)
|
||||
|
||||
if args.dump:
|
||||
|
||||
@@ -408,13 +408,7 @@ def config_prefer_upstream(args):
|
||||
pkgs = {}
|
||||
for spec in pref_specs:
|
||||
# Collect all the upstream compilers and versions for this package.
|
||||
pkg = pkgs.get(
|
||||
spec.name,
|
||||
{
|
||||
"version": [],
|
||||
"compiler": [],
|
||||
},
|
||||
)
|
||||
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
|
||||
pkgs[spec.name] = pkg
|
||||
|
||||
# We have no existing variant if this is our first added version.
|
||||
|
||||
@@ -16,19 +16,10 @@
|
||||
import spack.stage
|
||||
import spack.util.web
|
||||
from spack.spec import Spec
|
||||
from spack.url import (
|
||||
UndetectableNameError,
|
||||
UndetectableVersionError,
|
||||
parse_name,
|
||||
parse_version,
|
||||
)
|
||||
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
|
||||
from spack.util.editor import editor
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.naming import (
|
||||
mod_to_class,
|
||||
simplify_name,
|
||||
valid_fully_qualified_module_name,
|
||||
)
|
||||
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
|
||||
|
||||
description = "create a new package file"
|
||||
section = "packaging"
|
||||
|
||||
@@ -96,8 +96,5 @@ def report(args):
|
||||
|
||||
|
||||
def debug(parser, args):
|
||||
action = {
|
||||
"create-db-tarball": create_db_tarball,
|
||||
"report": report,
|
||||
}
|
||||
action = {"create-db-tarball": create_db_tarball, "report": report}
|
||||
action[args.debug_command](args)
|
||||
|
||||
@@ -33,12 +33,7 @@
|
||||
level = "long"
|
||||
|
||||
# Arguments for display_specs when we find ambiguity
|
||||
display_args = {
|
||||
"long": True,
|
||||
"show_flags": True,
|
||||
"variants": True,
|
||||
"indent": 4,
|
||||
}
|
||||
display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}
|
||||
|
||||
|
||||
def setup_parser(sp):
|
||||
|
||||
@@ -80,22 +80,12 @@ def compare_specs(a, b, to_string=False, color=None):
|
||||
# specs and to descend into dependency hashes so we include all facts.
|
||||
a_facts = set(
|
||||
shift(func)
|
||||
for func in setup.spec_clauses(
|
||||
a,
|
||||
body=True,
|
||||
expand_hashes=True,
|
||||
concrete_build_deps=True,
|
||||
)
|
||||
for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||
if func.name == "attr"
|
||||
)
|
||||
b_facts = set(
|
||||
shift(func)
|
||||
for func in setup.spec_clauses(
|
||||
b,
|
||||
body=True,
|
||||
expand_hashes=True,
|
||||
concrete_build_deps=True,
|
||||
)
|
||||
for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||
if func.name == "attr"
|
||||
)
|
||||
|
||||
|
||||
@@ -148,8 +148,7 @@ def env_activate(args):
|
||||
|
||||
if not args.shell:
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack env activate",
|
||||
" eval `spack env activate {sh_arg} [...]`",
|
||||
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
|
||||
)
|
||||
return 1
|
||||
|
||||
@@ -166,7 +165,7 @@ def env_activate(args):
|
||||
short_name = os.path.basename(env_path)
|
||||
ev.Environment(env).write(regenerate=False)
|
||||
|
||||
# Named environment
|
||||
# Managed environment
|
||||
elif ev.exists(env_name_or_dir) and not args.dir:
|
||||
env_path = ev.root(env_name_or_dir)
|
||||
short_name = env_name_or_dir
|
||||
@@ -238,8 +237,7 @@ def env_deactivate_setup_parser(subparser):
|
||||
def env_deactivate(args):
|
||||
if not args.shell:
|
||||
spack.cmd.common.shell_init_instructions(
|
||||
"spack env deactivate",
|
||||
" eval `spack env deactivate {sh_arg}`",
|
||||
"spack env deactivate", " eval `spack env deactivate {sh_arg}`"
|
||||
)
|
||||
return 1
|
||||
|
||||
|
||||
@@ -38,11 +38,7 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="packages with detected externals won't be built with Spack",
|
||||
)
|
||||
find_parser.add_argument(
|
||||
"--exclude",
|
||||
action="append",
|
||||
help="packages to exclude from search",
|
||||
)
|
||||
find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
|
||||
find_parser.add_argument(
|
||||
"-p",
|
||||
"--path",
|
||||
@@ -187,7 +183,6 @@ def external_read_cray_manifest(args):
|
||||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
||||
):
|
||||
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
manifest_files.append(manifest_file)
|
||||
|
||||
@@ -25,10 +25,7 @@ def setup_parser(subparser):
|
||||
help="fetch only missing (not yet installed) dependencies",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-D",
|
||||
"--dependencies",
|
||||
action="store_true",
|
||||
help="also fetch all dependencies",
|
||||
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["specs"])
|
||||
subparser.epilog = (
|
||||
|
||||
@@ -9,13 +9,7 @@
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.store
|
||||
from spack.graph import (
|
||||
DAGWithDependencyTypes,
|
||||
SimpleDAG,
|
||||
graph_ascii,
|
||||
graph_dot,
|
||||
static_graph_dot,
|
||||
)
|
||||
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
||||
|
||||
description = "generate graphs of package dependency relationships"
|
||||
section = "basic"
|
||||
|
||||
@@ -39,19 +39,14 @@
|
||||
compiler flags:
|
||||
@g{cflags="flags"} cppflags, cflags, cxxflags,
|
||||
fflags, ldflags, ldlibs
|
||||
@g{cflags=="flags"} propagate flags to package dependencies
|
||||
cppflags, cflags, cxxflags, fflags,
|
||||
ldflags, ldlibs
|
||||
@g{==} propagate flags to package dependencies
|
||||
|
||||
variants:
|
||||
@B{+variant} enable <variant>
|
||||
@B{++variant} propagate enable <variant>
|
||||
@r{-variant} or @r{~variant} disable <variant>
|
||||
@r{--variant} or @r{~~variant} propagate disable <variant>
|
||||
@B{variant=value} set non-boolean <variant> to <value>
|
||||
@B{variant==value} propagate non-boolean <variant> to <value>
|
||||
@B{variant=value1,value2,value3} set multi-value <variant> values
|
||||
@B{variant==value1,value2,value3} propagate multi-value <variant> values
|
||||
@B{++}, @r{--}, @r{~~}, @B{==} propagate variants to package dependencies
|
||||
|
||||
architecture variants:
|
||||
@m{platform=platform} linux, darwin, cray, etc.
|
||||
@@ -87,9 +82,7 @@
|
||||
"""
|
||||
|
||||
|
||||
guides = {
|
||||
"spec": spec_guide,
|
||||
}
|
||||
guides = {"spec": spec_guide}
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
||||
@@ -283,7 +283,7 @@ def print_tests(pkg):
|
||||
c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
|
||||
if pkg.name in c_names:
|
||||
v_names.extend(["c", "cxx", "fortran"])
|
||||
if pkg.spec.satisfies("llvm+clang"):
|
||||
if pkg.spec.intersects("llvm+clang"):
|
||||
v_names.extend(["c", "cxx"])
|
||||
# TODO Refactor END
|
||||
|
||||
|
||||
@@ -263,146 +263,6 @@ def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> s
|
||||
return result
|
||||
|
||||
|
||||
def install_specs(specs, install_kwargs, cli_args):
|
||||
try:
|
||||
if ev.active_environment():
|
||||
install_specs_inside_environment(specs, install_kwargs, cli_args)
|
||||
else:
|
||||
install_specs_outside_environment(specs, install_kwargs)
|
||||
except spack.build_environment.InstallError as e:
|
||||
if cli_args.show_log_on_error:
|
||||
e.print_context()
|
||||
assert e.pkg, "Expected InstallError to include the associated package"
|
||||
if not os.path.exists(e.pkg.build_log_path):
|
||||
tty.error("'spack install' created no log.")
|
||||
else:
|
||||
sys.stderr.write("Full build log:\n")
|
||||
with open(e.pkg.build_log_path) as log:
|
||||
shutil.copyfileobj(log, sys.stderr)
|
||||
raise
|
||||
|
||||
|
||||
def install_specs_inside_environment(specs, install_kwargs, cli_args):
|
||||
specs_to_install, specs_to_add = [], []
|
||||
env = ev.active_environment()
|
||||
for abstract, concrete in specs:
|
||||
# This won't find specs added to the env since last
|
||||
# concretize, therefore should we consider enforcing
|
||||
# concretization of the env before allowing to install
|
||||
# specs?
|
||||
m_spec = env.matching_spec(abstract)
|
||||
|
||||
# If there is any ambiguity in the above call to matching_spec
|
||||
# (i.e. if more than one spec in the environment matches), then
|
||||
# SpackEnvironmentError is raised, with a message listing the
|
||||
# the matches. Getting to this point means there were either
|
||||
# no matches or exactly one match.
|
||||
|
||||
if not m_spec and not cli_args.add:
|
||||
msg = (
|
||||
"Cannot install '{0}' because it is not in the current environment."
|
||||
" You can add it to the environment with 'spack add {0}', or as part"
|
||||
" of the install command with 'spack install --add {0}'"
|
||||
).format(str(abstract))
|
||||
tty.die(msg)
|
||||
|
||||
if not m_spec:
|
||||
tty.debug("adding {0} as a root".format(abstract.name))
|
specs_to_add.append((abstract, concrete))
continue

tty.debug("exactly one match for {0} in env -> {1}".format(m_spec.name, m_spec.dag_hash()))

if m_spec in env.roots() or not cli_args.add:
# either the single match is a root spec (in which case
# the spec is not added to the env again), or the user did
# not specify --add (in which case it is assumed we are
# installing already-concretized specs in the env)
tty.debug("just install {0}".format(m_spec.name))
specs_to_install.append(m_spec)
else:
# the single match is not a root (i.e. it's a dependency),
# and --add was specified, so we'll add it as a
# root before installing
tty.debug("add {0} then install it".format(m_spec.name))
specs_to_add.append((abstract, concrete))
if specs_to_add:
tty.debug("Adding the following specs as roots:")
for abstract, concrete in specs_to_add:
tty.debug(" {0}".format(abstract.name))
with env.write_transaction():
specs_to_install.append(env.concretize_and_add(abstract, concrete))
env.write(regenerate=False)
# Install the validated list of cli specs
if specs_to_install:
tty.debug("Installing the following cli specs:")
for s in specs_to_install:
tty.debug(" {0}".format(s.name))
env.install_specs(specs_to_install, **install_kwargs)


def install_specs_outside_environment(specs, install_kwargs):
installs = [(concrete.package, install_kwargs) for _, concrete in specs]
builder = PackageInstaller(installs)
builder.install()


def install_all_specs_from_active_environment(
install_kwargs, only_concrete, cli_test_arg, reporter_factory
):
"""Install all specs from the active environment

Args:
install_kwargs (dict): dictionary of options to be passed to the installer
only_concrete (bool): if true don't concretize the environment, but install
only the specs that are already concrete
cli_test_arg (bool or str): command line argument to select which test to run
reporter: reporter object for the installations
"""
env = ev.active_environment()
if not env:
msg = "install requires a package argument or active environment"
if "spack.yaml" in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
# have intended to use that
msg += "\n\n"
msg += "Did you mean to install using the `spack.yaml`"
msg += " in this directory? Try: \n"
msg += " spack env activate .\n"
msg += " spack install\n"
msg += " OR\n"
msg += " spack --env . install"
tty.die(msg)

install_kwargs["tests"] = compute_tests_install_kwargs(env.user_specs, cli_test_arg)
if not only_concrete:
with env.write_transaction():
concretized_specs = env.concretize(tests=install_kwargs["tests"])
ev.display_specs(concretized_specs)

# save view regeneration for later, so that we only do it
# once, as it can be slow.
env.write(regenerate=False)

specs = env.all_specs()
if not specs:
msg = "{0} environment has no specs to install".format(env.name)
tty.msg(msg)
return

reporter = reporter_factory(specs) or lang.nullcontext()

tty.msg("Installing environment {0}".format(env.name))
with reporter:
env.install_all(**install_kwargs)

tty.debug("Regenerating environment views for {0}".format(env.name))
with env.write_transaction():
# write env to trigger view generation and modulefile
# generation
env.write()


def compute_tests_install_kwargs(specs, cli_test_arg):
"""Translate the test cli argument into the proper install argument"""
if cli_test_arg == "all":
@@ -412,43 +272,6 @@ def compute_tests_install_kwargs(specs, cli_test_arg):
return False

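The hunk header above elides most of compute_tests_install_kwargs. As a rough, hypothetical sketch of the translation it performs (this is a reconstruction for illustration, not the body shown in the diff): "all" maps to True, "root" to the list of requested root names, anything else to False.

def compute_tests_install_kwargs_sketch(specs, cli_test_arg):
    # Hypothetical reconstruction for illustration only.
    if cli_test_arg == "all":
        return True  # run tests for every package in the DAG
    if cli_test_arg == "root":
        return [spec.name for spec in specs]  # test only the requested roots
    return False  # default: no tests
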
def specs_from_cli(args, install_kwargs):
"""Return abstract and concrete spec parsed from the command line."""
abstract_specs = spack.cmd.parse_specs(args.spec)
install_kwargs["tests"] = compute_tests_install_kwargs(abstract_specs, args.test)
try:
concrete_specs = spack.cmd.parse_specs(
args.spec, concretize=True, tests=install_kwargs["tests"]
)
except SpackError as e:
tty.debug(e)
if args.log_format is not None:
reporter = args.reporter()
reporter.concretization_report(report_filename(args, abstract_specs), e.message)
raise
return abstract_specs, concrete_specs


def concrete_specs_from_file(args):
"""Return the list of concrete specs read from files."""
result = []
for file in args.specfiles:
with open(file, "r") as f:
if file.endswith("yaml") or file.endswith("yml"):
s = spack.spec.Spec.from_yaml(f)
else:
s = spack.spec.Spec.from_json(f)

concretized = s.concretized()
if concretized.dag_hash() != s.dag_hash():
msg = 'skipped invalid file "{0}". '
msg += "The file does not contain a concrete spec."
tty.warn(msg.format(file))
continue
result.append(concretized)
return result


def require_user_confirmation_for_overwrite(concrete_specs, args):
if args.yes_to_all:
return
@@ -475,12 +298,40 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
tty.die("Reinstallation aborted.")


def _dump_log_on_error(e: spack.build_environment.InstallError):
e.print_context()
assert e.pkg, "Expected InstallError to include the associated package"
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
sys.stderr.write("Full build log:\n")
with open(e.pkg.build_log_path, errors="replace") as log:
shutil.copyfileobj(log, sys.stderr)

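_dump_log_on_error above streams the failed package's build log to stderr with shutil.copyfileobj. A minimal standalone sketch of that pattern, with generic names and no Spack dependencies:

import shutil
import sys

def dump_log(path):
    # Stream a potentially large log file to stderr without reading it all
    # into memory; errors="replace" tolerates non-UTF-8 bytes in build output.
    try:
        with open(path, errors="replace") as log:
            shutil.copyfileobj(log, sys.stderr)
    except FileNotFoundError:
        sys.stderr.write("no log was produced\n")
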
def _die_require_env():
msg = "install requires a package argument or active environment"
if "spack.yaml" in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
# have intended to use that
msg += (
"\n\n"
"Did you mean to install using the `spack.yaml`"
" in this directory? Try: \n"
" spack env activate .\n"
" spack install\n"
" OR\n"
" spack --env . install"
)
tty.die(msg)


def install(parser, args):
# TODO: unify args.verbose?
tty.set_verbose(args.verbose or args.install_verbose)

if args.help_cdash:
spack.cmd.common.arguments.print_cdash_help()
arguments.print_cdash_help()
return

if args.no_checksum:
@@ -489,45 +340,150 @@ def install(parser, args):
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")

spack.cmd.common.arguments.sanitize_reporter_options(args)
arguments.sanitize_reporter_options(args)

def reporter_factory(specs):
if args.log_format is None:
return None
return lang.nullcontext()

context_manager = spack.report.build_context_manager(
reporter=args.reporter(),
filename=report_filename(args, specs=specs),
specs=specs,
return spack.report.build_context_manager(
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
)
return context_manager

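Note the change in reporter_factory above: instead of returning None when no --log-format is given, it now returns lang.nullcontext() (Spack's helper in llnl.util.lang, analogous to contextlib.nullcontext), so callers can always write "with reporter_factory(specs):" unconditionally. A minimal standalone sketch of the same pattern using only the standard library (the names here are hypothetical):

import contextlib

def reporter_factory(specs, log_format=None):
    # Hand back a do-nothing context manager when no report was requested.
    if log_format is None:
        return contextlib.nullcontext()
    raise NotImplementedError("a real reporting context manager would go here")

with reporter_factory(["zlib"]):
    pass  # the install would run here, reported or not
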
install_kwargs = install_kwargs_from_args(args)

if not args.spec and not args.specfiles:
# If there are no args but an active environment then install the packages from it.
install_all_specs_from_active_environment(
install_kwargs=install_kwargs,
only_concrete=args.only_concrete,
cli_test_arg=args.test,
reporter_factory=reporter_factory,
)
env = ev.active_environment()

if not env and not args.spec and not args.specfiles:
_die_require_env()

try:
if env:
install_with_active_env(env, args, install_kwargs, reporter_factory)
else:
install_without_active_env(args, install_kwargs, reporter_factory)
except spack.build_environment.InstallError as e:
if args.show_log_on_error:
_dump_log_on_error(e)
raise


def _maybe_add_and_concretize(args, env, specs):
"""Handle the overloaded spack install behavior of adding
and automatically concretizing specs"""

# Users can opt out of accidental concretizations with --only-concrete
if args.only_concrete:
return

# Specs from CLI
abstract_specs, concrete_specs = specs_from_cli(args, install_kwargs)
# Otherwise, we will modify the environment.
with env.write_transaction():
# `spack add` adds these specs.
if args.add:
for spec in specs:
env.add(spec)

# Concrete specs from YAML or JSON files
specs_from_file = concrete_specs_from_file(args)
abstract_specs.extend(specs_from_file)
concrete_specs.extend(specs_from_file)
# `spack concretize`
tests = compute_tests_install_kwargs(env.user_specs, args.test)
concretized_specs = env.concretize(tests=tests)
ev.display_specs(concretized_specs)

# save view regeneration for later, so that we only do it
# once, as it can be slow.
env.write(regenerate=False)

def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
specs = spack.cmd.parse_specs(args.spec)

# The following two commands are equivalent:
# 1. `spack install --add x y z`
# 2. `spack add x y z && spack concretize && spack install --only-concrete`
# here we do the `add` and `concretize` part.
_maybe_add_and_concretize(args, env, specs)

# Now we're doing `spack install --only-concrete`.
if args.add or not specs:
specs_to_install = env.concrete_roots()
if not specs_to_install:
tty.msg(f"{env.name} environment has no specs to install")
return

# `spack install x y z` without --add is installing matching specs in the env.
else:
specs_to_install = env.all_matching_specs(*specs)
if not specs_to_install:
msg = (
"Cannot install '{0}' because no matching specs are in the current environment."
" You can add specs to the environment with 'spack add {0}', or as part"
" of the install command with 'spack install --add {0}'"
).format(" ".join(args.spec))
tty.die(msg)

install_kwargs["tests"] = compute_tests_install_kwargs(specs_to_install, args.test)

if args.overwrite:
require_user_confirmation_for_overwrite(specs_to_install, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]

try:
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
finally:
# TODO: this is doing way too much to trigger
# views and modules to be generated.
with env.write_transaction():
env.write(regenerate=True)


def concrete_specs_from_cli(args, install_kwargs):
"""Return abstract and concrete spec parsed from the command line."""
abstract_specs = spack.cmd.parse_specs(args.spec)
install_kwargs["tests"] = compute_tests_install_kwargs(abstract_specs, args.test)
try:
concrete_specs = spack.cmd.parse_specs(
args.spec, concretize=True, tests=install_kwargs["tests"]
)
except SpackError as e:
tty.debug(e)
if args.log_format is not None:
reporter = args.reporter()
reporter.concretization_report(report_filename(args, abstract_specs), e.message)
raise
return concrete_specs

def concrete_specs_from_file(args):
"""Return the list of concrete specs read from files."""
result = []
for file in args.specfiles:
with open(file, "r") as f:
if file.endswith("yaml") or file.endswith("yml"):
s = spack.spec.Spec.from_yaml(f)
else:
s = spack.spec.Spec.from_json(f)

concretized = s.concretized()
if concretized.dag_hash() != s.dag_hash():
msg = 'skipped invalid file "{0}". '
msg += "The file does not contain a concrete spec."
tty.warn(msg.format(file))
continue
result.append(concretized)
return result

def install_without_active_env(args, install_kwargs, reporter_factory):
concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)

if len(concrete_specs) == 0:
tty.die("The `spack install` command requires a spec to install.")

reporter = reporter_factory(concrete_specs) or lang.nullcontext()
with reporter:
with reporter_factory(concrete_specs):
if args.overwrite:
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
install_specs(zip(abstract_specs, concrete_specs), install_kwargs, args)

installs = [(s.package, install_kwargs) for s in concrete_specs]
builder = PackageInstaller(installs)
builder.install()

@@ -58,10 +58,7 @@

#: licensed files that can have LGPL language in them
#: so far, just this command -- so it can find LGPL things elsewhere
lgpl_exceptions = [
r"lib/spack/spack/cmd/license.py",
r"lib/spack/spack/test/cmd/license.py",
]
lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]


def _all_spack_files(root=spack.paths.prefix):
@@ -129,7 +126,6 @@ def error_messages(self):


def _check_license(lines, path):

found = []

for line in lines:

@@ -98,8 +98,7 @@ def load(parser, args):
if not args.shell:
specs_str = " ".join(args.constraint) or "SPECS"
spack.cmd.common.shell_init_instructions(
"spack load",
" eval `spack load {sh_arg} %s`" % specs_str,
"spack load", " eval `spack load {sh_arg} %s`" % specs_str
)
return 1


@@ -95,7 +95,7 @@ def location(parser, args):
spack.cmd.require_active_env("location -e")
path = ev.active_environment().path
else:
# Get named environment path
# Get path of requested environment
if not ev.exists(args.location_env):
tty.die("no such environment: '%s'" % args.location_env)
path = ev.root(args.location_env)

@@ -27,12 +27,7 @@
"""

# Arguments for display_specs when we find ambiguity
display_args = {
"long": True,
"show_flags": False,
"variants": False,
"indent": 4,
}
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


def setup_parser(subparser):

@@ -335,7 +335,7 @@ def not_excluded_fn(args):
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))

def not_excluded(x):
return not any(x.satisfies(y, strict=True) for y in exclude_specs)
return not any(x.satisfies(y) for y in exclude_specs)

return not_excluded

@@ -445,9 +445,7 @@ def mirror_create(args):

mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs,
path=path,
skip_unstable_versions=args.skip_unstable_versions,
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
)


@@ -467,9 +465,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs,
path=path,
skip_unstable_versions=skip_unstable_versions,
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
)

@@ -180,10 +180,7 @@ def loads(module_type, specs, args, out=None):
for spec in specs
)

module_commands = {
"tcl": "module load ",
"lmod": "module load ",
}
module_commands = {"tcl": "module load ", "lmod": "module load "}

d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}

@@ -368,18 +365,14 @@ def refresh(module_type, specs, args):


def modules_cmd(parser, args, module_type, callbacks=callbacks):

# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
"refresh": {"installed": True, "known": True},
}
constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
query_args = constraint_qualifiers.get(args.subparser_name, {})

# Get the specs that match the query from the DB
specs = args.specs(**query_args)

try:

callbacks[args.subparser_name](module_type, specs, args)

except MultipleSpecsMatch:

@@ -182,11 +182,7 @@ def solve(parser, args):
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
)
if not setup_only:
_process_result(result, show, required_format, kwargs)

@@ -110,7 +110,7 @@ def spec(parser, args):
else:
tty.die("spack spec requires at least one spec or an active environment")

for (input, output) in specs:
for input, output in specs:
# With -y, just print YAML to output.
if args.format:
if args.format == "yaml":

@@ -30,20 +30,13 @@ def grouper(iterable, n, fillvalue=None):


#: List of directories to exclude from checks -- relative to spack root
exclude_directories = [
os.path.relpath(spack.paths.external_path, spack.paths.prefix),
]
exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]

#: Order in which tools should be run. flake8 is last so that it can
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = [
"isort",
"black",
"flake8",
"mypy",
]
tool_names = ["isort", "black", "flake8", "mypy"]

#: tools we run in spack style
tools = {}
@@ -52,7 +45,7 @@ def grouper(iterable, n, fillvalue=None):
mypy_ignores = [
# same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
# doesn't exist in mypy 0.971 for Python 3.6
"[annotation-unchecked]",
"[annotation-unchecked]"
]

@@ -150,10 +143,7 @@ def setup_parser(subparser):
help="branch to compare against to determine changed files (default: develop)",
)
subparser.add_argument(
"-a",
"--all",
action="store_true",
help="check all files, not just changed files",
"-a", "--all", action="store_true", help="check all files, not just changed files"
)
subparser.add_argument(
"-r",
@@ -178,10 +168,7 @@ def setup_parser(subparser):
help="format automatically if possible (e.g., with isort, black)",
)
subparser.add_argument(
"--root",
action="store",
default=None,
help="style check a different spack instance",
"--root", action="store", default=None, help="style check a different spack instance"
)

tool_group = subparser.add_mutually_exclusive_group()
@@ -211,6 +198,7 @@ def rewrite_and_print_output(
output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
):
"""rewrite ouput with <file>:<line>: format to respect path args"""

# print results relative to current working directory
def translate(match):
return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
@@ -281,24 +269,10 @@ def run_mypy(mypy_cmd, file_list, args):
os.path.join(spack.paths.prefix, "pyproject.toml"),
"--show-error-codes",
]
mypy_arg_sets = [
common_mypy_args
+ [
"--package",
"spack",
"--package",
"llnl",
]
]
mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
mypy_arg_sets.append(
common_mypy_args
+ [
"--package",
"packages",
"--disable-error-code",
"no-redef",
]
common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
)

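The mypy_arg_sets hunk above only flattens the argument lists; each set still corresponds to one mypy invocation. As a hypothetical illustration of running one such arg set directly (the real command drives mypy through Spack's own tool wrapper, and the config path here is an assumption):

import subprocess

common_mypy_args = ["--config-file", "pyproject.toml", "--show-error-codes"]
arg_set = common_mypy_args + ["--package", "spack", "--package", "llnl"]

# Roughly: mypy --config-file pyproject.toml --show-error-codes \
#               --package spack --package llnl
result = subprocess.run(["mypy"] + arg_set, check=False)
print("mypy exited with", result.returncode)
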
returncode = 0

@@ -33,9 +33,7 @@ def setup_parser(subparser):

# Run
run_parser = sp.add_parser(
"run",
description=test_run.__doc__,
help=spack.cmd.first_line(test_run.__doc__),
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
)

alias_help_msg = "Provide an alias for this test-suite"
@@ -80,9 +78,7 @@ def setup_parser(subparser):

# List
list_parser = sp.add_parser(
"list",
description=test_list.__doc__,
help=spack.cmd.first_line(test_list.__doc__),
"list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
)
list_parser.add_argument(
"-a",
@@ -96,9 +92,7 @@ def setup_parser(subparser):

# Find
find_parser = sp.add_parser(
"find",
description=test_find.__doc__,
help=spack.cmd.first_line(test_find.__doc__),
"find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
)
find_parser.add_argument(
"filter",
@@ -108,9 +102,7 @@ def setup_parser(subparser):

# Status
status_parser = sp.add_parser(
"status",
description=test_status.__doc__,
help=spack.cmd.first_line(test_status.__doc__),
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
)
status_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
@@ -147,9 +139,7 @@ def setup_parser(subparser):

# Remove
remove_parser = sp.add_parser(
"remove",
description=test_remove.__doc__,
help=spack.cmd.first_line(test_remove.__doc__),
"remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
)
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument(
@@ -189,11 +179,7 @@ def test_run(args):
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
specs_to_test = []
for spec in specs:
matching = spack.store.db.query_local(
spec,
hashes=hashes,
explicit=explicit,
)
matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
@@ -228,14 +214,7 @@ def test_run(args):


def report_filename(args, test_suite):
if args.log_file:
if os.path.isabs(args.log_file):
return args.log_file
else:
log_dir = os.getcwd()
return os.path.join(log_dir, args.log_file)
else:
return os.path.join(os.getcwd(), "test-%s" % test_suite.name)
return os.path.abspath(args.log_file or "test-{}".format(test_suite.name))

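The report_filename rewrite above collapses the old branching into a single os.path.abspath call. A small standalone check of why the two forms agree for typical inputs (note that abspath also normalizes the path, so inputs containing ".." segments could differ; the helper names below are hypothetical):

import os

def report_filename_old(log_file, suite_name):
    if log_file:
        if os.path.isabs(log_file):
            return log_file
        return os.path.join(os.getcwd(), log_file)
    return os.path.join(os.getcwd(), "test-%s" % suite_name)

def report_filename_new(log_file, suite_name):
    return os.path.abspath(log_file or "test-{}".format(suite_name))

# Typical cases agree (absolute inputs also agree on POSIX when already normalized):
assert report_filename_new(None, "suite") == report_filename_old(None, "suite")
assert report_filename_new("out.xml", "suite") == report_filename_old("out.xml", "suite")
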
def create_reporter(args, specs_to_test, test_suite):

@@ -5,7 +5,7 @@
import spack.cmd.common.env_utility as env_utility

description = (
"run a command in a spec's test environment, " "or dump its environment to screen or file"
"run a command in a spec's test environment, or dump its environment to screen or file"
)
section = "admin"
level = "long"

@@ -31,12 +31,7 @@
"""

# Arguments for display_specs when we find ambiguity
display_args = {
"long": True,
"show_flags": False,
"variants": False,
"indent": 4,
}
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


def setup_parser(subparser):
@@ -133,7 +128,7 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False, o
return specs_from_cli


def installed_dependents(specs, env):
def installed_runtime_dependents(specs, env):
"""Map each spec to a list of its installed dependents.

Args:
@@ -160,10 +155,10 @@ def installed_dependents(specs, env):

for spec in specs:
for dpt in traverse.traverse_nodes(
spec.dependents(deptype="all"),
spec.dependents(deptype=("link", "run")),
direction="parents",
visited=visited,
deptype="all",
deptype=("link", "run"),
root=True,
key=lambda s: s.dag_hash(),
):
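The installed_runtime_dependents change above narrows the dependents walk from deptype="all" to ("link", "run"), so build-only dependents no longer count. A toy, Spack-free illustration of that filtering (the graph and dependency types below are made up for the example):

# Each entry maps a package to its dependents, tagged with the dependency type.
dependents = {
    "zlib": [("cmake", "build"), ("curl", "link"), ("python", "link")],
    "curl": [("git", "link")],
}

def runtime_dependents(pkg, deptypes=("link", "run")):
    # Walk parent edges, keeping only link/run dependents; build-only edges
    # such as zlib -> cmake are skipped, mirroring the intent of the change.
    seen, stack, result = set(), [pkg], []
    while stack:
        current = stack.pop()
        for parent, dtype in dependents.get(current, []):
            if dtype in deptypes and parent not in seen:
                seen.add(parent)
                result.append(parent)
                stack.append(parent)
    return result

print(runtime_dependents("zlib"))  # ['curl', 'python', 'git'], no 'cmake'
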
@@ -236,12 +231,7 @@ def do_uninstall(specs, force=False):
hashes_to_remove = set(s.dag_hash() for s in specs)

for s in traverse.traverse_nodes(
specs,
order="topo",
direction="children",
root=True,
cover="nodes",
deptype="all",
specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
):
if s.dag_hash() in hashes_to_remove:
spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
@@ -265,7 +255,7 @@ def get_uninstall_list(args, specs, env):
# args.all takes care of the case where '-a' is given in the cli
base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force))

active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env)
active_dpts, outside_dpts = installed_runtime_dependents(base_uninstall_specs, env)
# It will be useful to track the unified set of specs with dependents, as
# well as to separately track specs in the current env with dependents
spec_to_dpts = {}

@@ -26,7 +26,6 @@
description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"
is_windows = sys.platform == "win32"


def setup_parser(subparser):
@@ -212,7 +211,7 @@ def unit_test(parser, args, unknown_args):
# mock configuration used by unit tests
# Note: skip on windows here because for the moment,
# clingo is wholly unsupported from bootstrap
if not is_windows:
if sys.platform != "win32":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:

Some files were not shown because too many files have changed in this diff.