Compare commits

819 commits: features/k ... features/g
`.codecov.yml` (13 lines added)

@@ -19,3 +19,16 @@ comment: off
# annotations in files that seemingly have nothing to do with the PR.
github_checks:
  annotations: false

# Attempt to fix "Missing base commit" messages in the codecov UI.
# Because we do not run full tests on package PRs, package PRs' merge
# commits on `develop` don't have coverage info. It appears that
# codecov will give you an error if the pseudo-base's coverage data
# doesn't all apply properly to the real PR base.
#
# See here for docs:
# https://docs.codecov.com/docs/comparing-commits#pseudo-comparison
# See here for another potential solution:
# https://community.codecov.com/t/2480/15
codecov:
  allow_coverage_offsets: true
`.coveragerc` (removed, 38 lines)

@@ -1,38 +0,0 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
parallel = True
concurrency = multiprocessing
branch = True
source =
    bin
    lib
omit =
    lib/spack/spack/test/*
    lib/spack/docs/*
    lib/spack/external/*
    share/spack/qa/*

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    # Have to re-enable the standard pragma
    pragma: no cover

    # Don't complain about missing debug-only code:
    def __repr__
    if self\.debug

    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError

    # Don't complain if non-runnable code isn't run:
    if 0:
    if False:
    if __name__ == .__main__.:

ignore_errors = True

[html]
directory = htmlcov
`.github/ISSUE_TEMPLATE/bug_report.md` (removed, 42 lines)

@@ -1,42 +0,0 @@
---
name: "\U0001F41E Bug report"
about: Report a bug in the core of Spack (command not working as expected, etc.)
labels: "bug,triage"
---

<!-- Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
Example: "I ran `spack find` to list all the installed packages and ..." -->

### Steps to reproduce the issue

```console
$ spack <command1> <spec>
$ spack <command2> <spec>
...
```

### Error Message

<!-- If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect. -->
```console
$ spack --debug --stacktrace <command>
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have searched the issues of this repo and believe this is not a duplicate
- [ ] I have run the failing commands in debug mode and reported the output

<!-- We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!

If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack! -->
`.github/ISSUE_TEMPLATE/bug_report.yml` (new file, 58 lines)

@@ -0,0 +1,58 @@
name: "\U0001F41E Bug report"
description: Report a bug in the core of Spack (command not working as expected, etc.)
labels: [bug, triage]
body:
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: |
        Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
        Example: "I ran `spack find` to list all the installed packages and ..."
      placeholder: |
        ```console
        $ spack <command1> <spec>
        $ spack <command2> <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: error
    attributes:
      label: Error message
      description: |
        If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect.
      placeholder: |
        ```console
        $ spack --debug --stacktrace <command>
        ```
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
        - label: I have run the failing commands in debug mode and reported the output
          required: true
  - type: markdown
    attributes:
      value: |
        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
`.github/ISSUE_TEMPLATE/build_error.md` (removed, 43 lines)

@@ -1,43 +0,0 @@
---
name: "\U0001F4A5 Build error"
about: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: "build-error"
---

<!-- Thanks for taking the time to report this build failure. To proceed with the report please:

1. Title the issue "Installation issue: <name-of-the-package>".
2. Provide the information required below.

We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->

### Steps to reproduce the issue

<!-- Fill in the exact spec you are trying to build and the relevant part of the error message -->
```console
$ spack install <spec>
...
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- Please upload the following files. They should be present in the stage directory of the failing build. Also upload any config.log or similar file if one exists. -->
* [spack-build-out.txt]()
* [spack-build-env.txt]()

<!-- Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and @mention them here if they exist. -->

### General information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have run `spack maintainers <name-of-the-package>` and @mentioned any maintainers
- [ ] I have uploaded the build log and environment files
- [ ] I have searched the issues of this repo and believe this is not a duplicate
`.github/ISSUE_TEMPLATE/build_error.yml` (new file, 64 lines)

@@ -0,0 +1,64 @@
name: "\U0001F4A5 Build error"
description: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: [build-error]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to report this build failure. To proceed with the report please:
        1. Title the issue `Installation issue: <name-of-the-package>`.
        2. Provide the information required below.

        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce the issue
      description: |
        Fill in the exact spec you are trying to build and the relevant part of the error message
      placeholder: |
        ```console
        $ spack install <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: |
        Please upload the following files:
        * **`spack-build-out.txt`**
        * **`spack-build-env.txt`**

        They should be present in the stage directory of the failing build. Also upload any `config.log` or similar file if one exists.
  - type: markdown
    attributes:
      value: |
        Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
          required: true
        - label: I have uploaded the build log and environment files
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
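The new form, like the old template, asks reporters to look up and @mention package maintainers. As a purely hypothetical illustration (the package name here is just an arbitrary pick, not taken from this changeset's intent), that lookup is:

```console
$ spack maintainers openjpeg
```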
`.github/ISSUE_TEMPLATE/config.yml` (new file, 1 line)

@@ -0,0 +1 @@
blank_issues_enabled: true
`.github/ISSUE_TEMPLATE/feature_request.md` (removed, 33 lines)

@@ -1,33 +0,0 @@
---
name: "\U0001F38A Feature request"
about: Suggest adding a feature that is not yet in Spack
labels: feature

---

<!--*Please add a concise summary of your suggestion here.*-->

### Rationale

<!--*Is your feature request related to a problem? Please describe it!*-->

### Description

<!--*Describe the solution you'd like and the alternatives you have considered.*-->


### Additional information
<!--*Add any other context about the feature request here.*-->


### General information

- [ ] I have run `spack --version` and reported the version of Spack
- [ ] I have searched the issues of this repo and believe this is not a duplicate



<!--If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack!
-->
`.github/ISSUE_TEMPLATE/feature_request.yml` (new file, 41 lines)

@@ -0,0 +1,41 @@
name: "\U0001F38A Feature request"
description: Suggest adding a feature that is not yet in Spack
labels: [feature]
body:
  - type: textarea
    id: summary
    attributes:
      label: Summary
      description: Please add a concise summary of your suggestion here.
    validations:
      required: true
  - type: textarea
    id: rationale
    attributes:
      label: Rationale
      description: Is your feature request related to a problem? Please describe it!
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Describe the solution you'd like and the alternatives you have considered.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: Add any other context about the feature request here.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack --version` and reported the version of Spack
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
  - type: markdown
    attributes:
      value: |
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
`.github/workflows/bootstrap.yml` (new file, 161 lines)

@@ -0,0 +1,161 @@
name: Bootstrapping

on:
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the
      # built-in repository or documentation
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
      - '!var/spack/repos/builtin/packages/python/**'
      - '!var/spack/repos/builtin/packages/re2c/**'
      - 'lib/spack/docs/**'
  schedule:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

jobs:

  fedora-sources:
    runs-on: ubuntu-latest
    container: "fedora:latest"
    steps:
      - name: Install dependencies
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison bison-devel libstdc++-static
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update -y && apt-get upgrade -y
          apt-get install -y \
              bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
              make patch unzip xz-utils python3 python3-dev tree \
              cmake bison
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd -m spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  opensuse-sources:
    runs-on: ubuntu-latest
    container: "opensuse/tumbleweed:latest"
    steps:
      - name: Install dependencies
        run: |
          zypper update -y
          zypper install -y \
              bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-sources:
    runs-on: macos-latest
    steps:
      - name: Install dependencies
        run: |
          brew install cmake bison@2.7 tree
      - uses: actions/checkout@v2
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
          spack bootstrap untrust github-actions
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-clingo-binaries:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - name: Install dependencies
        run: |
          brew install tree
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-clingo-binaries:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
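The `*-sources` jobs above all exercise the same check. A rough local equivalent, assuming a machine with `cmake` and `bison` already installed, would be:

```console
$ source share/spack/setup-env.sh
$ spack bootstrap untrust github-actions   # force bootstrapping from sources
$ spack external find cmake bison
$ spack -d solve zlib
$ tree ~/.spack/bootstrap/store/
```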
`.github/workflows/build-containers.yml` (new file, 72 lines)

@@ -0,0 +1,72 @@
name: Build & Deploy Docker Containers
on:
  # Build new Spack develop containers nightly.
  schedule:
    - cron: '34 0 * * *'
  # Let's also build & tag Spack containers on releases.
  release:
    types: [published]

jobs:
  deploy-images:
    runs-on: ubuntu-latest
    strategy:
      # Even if one container fails to build we still want the others
      # to continue their builds.
      fail-fast: false
      # A matrix of Dockerfile paths, associated tags, and which architectures
      # they support.
      matrix:
        dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
                     [centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64'],
                     [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64'],
                     [ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64'],
                     [ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64']]
    name: Build ${{ matrix.dockerfile[0] }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set Container Tag Normal (Nightly)
        run: |
          container="ghcr.io/spack/${{ matrix.dockerfile[0]}}:latest"
          echo "container=${container}" >> $GITHUB_ENV
          echo "versioned=${container}" >> $GITHUB_ENV

      # On a new release create a container with the same tag as the release.
      - name: Set Container Tag on Release
        if: github.event_name == 'release'
        run: |
          versioned="ghcr.io/spack/${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
          echo "versioned=${versioned}" >> $GITHUB_ENV

      - name: Check ${{ matrix.dockerfile[1] }} Exists
        run: |
          printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
          if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
            printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
            exit 1;
          fi

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Build & Deploy ${{ matrix.dockerfile[1] }}
        uses: docker/build-push-action@v2
        with:
          file: share/spack/docker/${{matrix.dockerfile[1]}}
          platforms: ${{ matrix.dockerfile[2] }}
          push: true
          tags: |
            ${{ env.container }}
            ${{ env.versioned }}
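Given the tags the workflow pushes, pulling one of the resulting images should look roughly like the following (the `ubuntu-bionic` name comes from the matrix above; the exact set of published tags may differ):

```console
$ docker pull ghcr.io/spack/ubuntu-bionic:latest
```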
`.github/workflows/linux_build_tests.yaml` (removed, 77 lines)

@@ -1,77 +0,0 @@
name: linux builds

on:
  push:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        package:
          - lz4  # MakefilePackage
          - mpich~fortran  # AutotoolsPackage
          - 'tut%gcc@:10.99.99'  # WafPackage
          - py-setuptools  # PythonPackage
          - openjpeg  # CMakePackage
          - r-rcpp  # RPackage
          - ruby-rake  # RubyPackage
    steps:
      - uses: actions/checkout@v2
      - uses: actions/cache@v2.1.6
        with:
          path: ~/.ccache
          key: ccache-build-${{ matrix.package }}
          restore-keys: |
            ccache-build-${{ matrix.package }}
      - uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install System Packages
        run: |
          sudo apt-get update
          sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev ruby findutils openssl libssl-dev libpciaccess-dev
          R --version
          perl --version
          ruby --version
      - name: Copy Configuration
        run: |
          ccache -M 300M && ccache -z
          # Set up external deps for build tests, b/c they take too long to compile
          cp share/spack/qa/configuration/*.yaml etc/spack/
      - name: Run the build test
        run: |
          . share/spack/setup-env.sh
          SPEC=${{ matrix.package }} share/spack/qa/run-build-tests
          ccache -s
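For reference, the harness this removed workflow drove can still be invoked by hand in roughly the same way, picking one spec from the old matrix (a sketch, not a supported entry point):

```console
$ . share/spack/setup-env.sh
$ cp share/spack/qa/configuration/*.yaml etc/spack/   # use external deps, as the workflow did
$ SPEC=lz4 share/spack/qa/run-build-tests
```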
`.github/workflows/unit_tests.yaml` (modified; 87 changed lines)

@@ -39,7 +39,7 @@ jobs:
          python-version: 3.9
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black types-six
          pip install --upgrade pip six setuptools types-six
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.

@@ -130,37 +130,28 @@
          sudo apt-get -y update
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              patchelf
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              patchelf cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage
          pip install --upgrade pip six setuptools codecov coverage[toml]
          # ensure style checks are not skipped in unit tests for python >= 3.6
          # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
          if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
            pip install --upgrade flake8 isort>=4.3.5 mypy>=0.900 black
          fi
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 34
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
      - name: Bootstrap clingo from sources
      - name: Bootstrap clingo
        if: ${{ matrix.concretizer == 'clingo' }}
        env:
          SPACK_PYTHON: python
        run: |
          . share/spack/setup-env.sh
          spack external find --not-buildable cmake bison
          spack bootstrap untrust spack-install
          spack -v solve zlib
      - name: Run unit tests (full suite with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}

@@ -180,7 +171,7 @@
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@v1
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,${{ matrix.concretizer }}

@@ -199,29 +190,15 @@
        run: |
          sudo apt-get -y update
          # Needed for shell tests
          sudo apt-get install -y coreutils csh zsh tcsh fish dash bash
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage
          pip install --upgrade pip six setuptools codecov coverage[toml]
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 38
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
      - name: Run shell tests (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |

@@ -232,7 +209,7 @@
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
      - uses: codecov/codecov-action@v1
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: shelltests,linux

@@ -309,24 +286,10 @@
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              patchelf
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 34
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
              patchelf kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage clingo
          pip install --upgrade pip six setuptools codecov coverage[toml] clingo
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.

@@ -348,7 +311,7 @@
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@v1
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,clingo

@@ -369,27 +332,33 @@
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools
          pip install --upgrade codecov coverage
          pip install --upgrade flake8 isort>=4.3.5 mypy>=0.800
          pip install --upgrade codecov coverage[toml]
      - name: Setup Homebrew packages
        run: |
          brew install dash fish gcc gnupg2 kcov
      - name: Run unit tests
        env:
          SPACK_TEST_SOLVER: clingo
        run: |
          git --version
          . .github/workflows/setup_git.sh
          . share/spack/setup-env.sh
          $(which spack) bootstrap untrust spack-install
          $(which spack) solve zlib
          if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
          then
            coverage run $(which spack) unit-test -x
            coverage combine
            coverage xml
            # Delete the symlink going from ./lib/spack/docs/_spack_root back to
            # the initial directory, since it causes ELOOP errors with codecov/actions@2
            rm lib/spack/docs/_spack_root
          else
            echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
            $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
          fi
      - uses: codecov/codecov-action@v1
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          file: ./coverage.xml
          files: ./coverage.xml
          flags: unittests,macos
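The macOS job above spells out the coverage invocation in full; run outside CI it would look roughly like:

```console
$ coverage run $(which spack) unit-test -x
$ coverage combine
$ coverage xml
```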
`.gitignore` (1 line added)

@@ -132,6 +132,7 @@ celerybeat.pid
.env
.venv
env/
!/lib/spack/env
venv/
ENV/
env.bak/
`.mypy.ini` (removed, 35 lines)

@@ -1,35 +0,0 @@
[mypy]
python_version = 3.7
files=lib/spack/llnl/**/*.py,lib/spack/spack/**/*.py
mypy_path=bin,lib/spack,lib/spack/external,var/spack/repos/builtin
# This and a generated import file allows supporting packages
namespace_packages=True
# To avoid re-factoring all the externals, ignore errors and missing imports
# globally, then turn back on in spack and spack submodules
ignore_errors=True
ignore_missing_imports=True

[mypy-spack.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-packages.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-llnl.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-spack.test.packages]
ignore_errors=True

# ignore errors in fake import path for packages
[mypy-spack.pkg.*]
ignore_errors=True
ignore_missing_imports=True

# jinja has syntax in it that requires python3 and causes a parse error
# skip importing it
[mypy-jinja2]
follow_imports=skip
@@ -1,7 +1,7 @@
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack

[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
[](https://codecov.io/gh/spack/spack)
[](https://spack.readthedocs.io)

@@ -36,6 +36,8 @@ Documentation
[**Full documentation**](https://spack.readthedocs.io/) is available, or
run `spack help` or `spack help --all`.

For a cheat sheet on Spack syntax, run `spack help --spec`.

Tutorial
----------------
23
bin/spack
23
bin/spack
@@ -28,6 +28,7 @@ exit 1
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
min_python3 = (3, 5)
|
||||
@@ -70,6 +71,28 @@ if "ruamel.yaml" in sys.modules:
|
||||
if "ruamel" in sys.modules:
|
||||
del sys.modules["ruamel"]
|
||||
|
||||
# The following code is here to avoid failures when updating
|
||||
# the develop version, due to spurious argparse.pyc files remaining
|
||||
# in the libs/spack/external directory, see:
|
||||
# https://github.com/spack/spack/pull/25376
|
||||
# TODO: Remove in v0.18.0 or later
|
||||
try:
|
||||
import argparse
|
||||
except ImportError:
|
||||
argparse_pyc = os.path.join(spack_external_libs, 'argparse.pyc')
|
||||
if not os.path.exists(argparse_pyc):
|
||||
raise
|
||||
try:
|
||||
os.remove(argparse_pyc)
|
||||
import argparse # noqa
|
||||
except Exception:
|
||||
msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. '
|
||||
'Either delete it manually or ask some administrator to '
|
||||
'delete it for you.')
|
||||
print(msg.format(argparse_pyc))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
import spack.main # noqa
|
||||
|
||||
# Once we've set up the system path, run the spack main method
|
||||
32 etc/spack/defaults/bootstrap.yaml Normal file
@@ -0,0 +1,32 @@
|
||||
bootstrap:
|
||||
# If set to false Spack will not bootstrap missing software,
|
||||
# but will instead raise an error.
|
||||
enable: true
|
||||
# Root directory for bootstrapping work. The software bootstrapped
|
||||
# by Spack is installed in a "store" subfolder of this root directory
|
||||
root: ~/.spack/bootstrap
|
||||
# Methods that can be used to bootstrap software. Each method may or
|
||||
# may not be able to bootstrap all of the software that Spack needs,
|
||||
# depending on its type.
|
||||
sources:
|
||||
- name: 'github-actions'
|
||||
type: buildcache
|
||||
description: |
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
# This method is just Spack bootstrapping the software it needs from sources.
|
||||
# It has been added here so that users can selectively disable bootstrapping
|
||||
# from sources by "untrusting" it.
|
||||
- name: spack-install
|
||||
type: install
|
||||
description: |
|
||||
Specs built from sources by Spack. May take a long time.
|
||||
trusted:
|
||||
# By default we trust bootstrapping from sources and from binaries
|
||||
# produced on Github via the workflow
|
||||
github-actions: true
|
||||
spack-install: true
|
@@ -134,6 +134,10 @@ config:
|
||||
# enabling locks.
|
||||
locks: true
|
||||
|
||||
# The default url fetch method to use.
|
||||
# If set to 'curl', Spack will require curl on the user's system
|
||||
# If set to 'urllib', Spack will use python built-in libs to fetch
|
||||
url_fetch_method: urllib
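# For example, a user who prefers curl could override this default from a
# higher-precedence configuration scope (a sketch; e.g. ~/.spack/config.yaml):
#
#   config:
#     url_fetch_method: curl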
|
||||
|
||||
# The maximum number of jobs to use for the build system (e.g. `make`), when
|
||||
# the -j flag is not given on the command line. Defaults to 16 when not set.
|
||||
|
@@ -43,6 +43,7 @@ packages:
|
||||
opencl: [pocl]
|
||||
onedal: [intel-oneapi-dal]
|
||||
osmesa: [mesa+osmesa, mesa18+osmesa]
|
||||
pbs: [openpbs, torque]
|
||||
pil: [py-pillow]
|
||||
pkgconfig: [pkgconf, pkg-config]
|
||||
rpc: [libtirpc]
|
||||
|
@@ -2,7 +2,7 @@
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS = -W
|
||||
SPHINXOPTS = -W --keep-going
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
@@ -695,6 +695,136 @@ structured the way you want:
|
||||
}
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
``spack diff``
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
It's often the case that you have two versions of a spec that you need to
|
||||
disambiguate. Let's say that we've installed two variants of zlib, one with
|
||||
and one without the optimize variant:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib
|
||||
$ spack install zlib -optimize
|
||||
|
||||
When we do ``spack find`` we see the two versions.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find zlib
|
||||
==> 2 installed packages
|
||||
-- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
|
||||
zlib@1.2.11 zlib@1.2.11
|
||||
|
||||
|
||||
Let's now say that we want to uninstall zlib. We run the command, and hit a problem
|
||||
real quickly since we have two!
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack uninstall zlib
|
||||
==> Error: zlib matches multiple packages:
|
||||
|
||||
-- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
|
||||
efzjziy zlib@1.2.11 sl7m27m zlib@1.2.11
|
||||
|
||||
==> Error: You can either:
|
||||
a) use a more specific spec, or
|
||||
b) specify the spec by its hash (e.g. `spack uninstall /hash`), or
|
||||
c) use `spack uninstall --all` to uninstall ALL matching specs.
|
||||
|
||||
Oh no! We can see from the above that we have two different versions of zlib installed,
|
||||
and the only difference between the two is the hash. This is a good use case for
|
||||
``spack diff``, which can easily show us the "diff" or set difference
|
||||
between properties for two packages. Let's try it out.
|
||||
Since the only difference we see in the ``spack find`` view is the hash, let's use
|
||||
``spack diff`` to look for more detail. We will provide the two hashes:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack diff /efzjziy /sl7m27m
|
||||
==> Warning: This interface is subject to change.
|
||||
|
||||
--- zlib@1.2.11efzjziyc3dmb5h5u5azsthgbgog5mj7g
|
||||
+++ zlib@1.2.11sl7m27mzkbejtkrajigj3a3m37ygv4u2
|
||||
@@ variant_value @@
|
||||
- zlib optimize False
|
||||
+ zlib optimize True
|
||||
|
||||
|
||||
The output is colored, and written in the style of a git diff. This means that you
|
||||
can copy and paste it into a GitHub markdown as a code block with language "diff"
|
||||
and it will render nicely! Here is an example:
|
||||
|
||||
.. code-block:: md
|
||||
|
||||
```diff
|
||||
--- zlib@1.2.11/efzjziyc3dmb5h5u5azsthgbgog5mj7g
|
||||
+++ zlib@1.2.11/sl7m27mzkbejtkrajigj3a3m37ygv4u2
|
||||
@@ variant_value @@
|
||||
- zlib optimize False
|
||||
+ zlib optimize True
|
||||
```
|
||||
|
||||
Awesome! Now let's read the diff. It tells us that our first zlib was built with ``~optimize``
|
||||
(``False``) and the second was built with ``+optimize`` (``True``). You can't see it in the docs
|
||||
here, but the output above is also colored based on the content being an addition (+) or
|
||||
subtraction (-).
|
||||
|
||||
This is a small example, but you will be able to see differences for any attributes on the
|
||||
installation spec. Running ``spack diff A B`` means we'll see which spec attributes are on
|
||||
``B`` but not on ``A`` (green) and which are on ``A`` but not on ``B`` (red). Here is another
|
||||
example with an additional difference type, ``version``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack diff python@2.7.8 python@3.8.11
|
||||
==> Warning: This interface is subject to change.
|
||||
|
||||
--- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
|
||||
+++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
|
||||
@@ variant_value @@
|
||||
- python patches a8c52415a8b03c0e5f28b5d52ae498f7a7e602007db2b9554df28cd5685839b8
|
||||
+ python patches 0d98e93189bc278fbc37a50ed7f183bd8aaf249a8e1670a465f0db6bb4f8cf87
|
||||
@@ version @@
|
||||
- openssl 1.0.2u
|
||||
+ openssl 1.1.1k
|
||||
- python 2.7.8
|
||||
+ python 3.8.11
|
||||
|
||||
Let's say that we were only interested in one kind of attribute above, ``version``.
|
||||
We can ask the command to only output this attribute. To do this, you'd add
|
||||
the ``--attribute`` for attribute parameter, which defaults to all. Here is how you
|
||||
would filter to show just versions:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack diff --attribute version python@2.7.8 python@3.8.11
|
||||
==> Warning: This interface is subject to change.
|
||||
|
||||
--- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
|
||||
+++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
|
||||
@@ version @@
|
||||
- openssl 1.0.2u
|
||||
+ openssl 1.1.1k
|
||||
- python 2.7.8
|
||||
+ python 3.8.11
|
||||
|
||||
And you can add as many attributes as you'd like with multiple `--attribute` arguments
|
||||
(for lots of attributes, you can use ``-a`` for short). Finally, if you want to view the
|
||||
data as json (and possibly pipe into an output file) just add ``--json``:
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack diff --json python@2.7.8 python@3.8.11
|
||||
|
||||
|
||||
This data will be much longer because along with the differences for ``A`` vs. ``B`` and
``B`` vs. ``A``, the JSON output also shows the intersection.
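For example, a minimal sketch of saving the output to a file and pretty-printing it with
standard tools (the exact JSON schema may change along with the interface):

.. code-block:: console

    $ spack diff --json python@2.7.8 python@3.8.11 > python-diff.json
    $ python -m json.tool python-diff.json | head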
|
||||
|
||||
|
||||
------------------------
|
||||
Using installed packages
|
||||
------------------------
|
||||
|
@@ -63,6 +63,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
build_systems/intelpackage
|
||||
build_systems/rocmpackage
|
||||
build_systems/custompackage
|
||||
build_systems/multiplepackage
|
||||
|
||||
For reference, the :py:mod:`Build System API docs <spack.build_systems>`
|
||||
provide a list of build systems and methods/attributes that can be
|
||||
|
@@ -130,8 +130,8 @@ Adding flags to cmake
|
||||
To add additional flags to the ``cmake`` call, simply override the
|
||||
``cmake_args`` function. The following example defines values for the flags
|
||||
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
|
||||
and without the :py:meth:`~.CMakePackage.define` and
|
||||
:py:meth:`~.CMakePackage.define_from_variant` helper functions:
|
||||
and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
|
||||
:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
350 lib/spack/docs/build_systems/multiplepackage.rst Normal file
@@ -0,0 +1,350 @@
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _multiplepackage:
|
||||
|
||||
----------------------
|
||||
Multiple Build Systems
|
||||
----------------------
|
||||
|
||||
Quite frequently, a package will change build systems from one version to the
|
||||
next. For example, a small project that once used a single Makefile to build
|
||||
may now require Autotools to handle the increased number of files that need to
|
||||
be compiled. Or, a package that once used Autotools may switch to CMake for
|
||||
Windows support. In this case, it becomes a bit more challenging to write a
|
||||
single build recipe for this package in Spack.
|
||||
|
||||
There are several ways that this can be handled in Spack:
|
||||
|
||||
#. Subclass the new build system, and override phases as needed (preferred)
|
||||
#. Subclass ``Package`` and implement ``install`` as needed
|
||||
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
|
||||
#. Rename the old package to ``*-legacy`` and create a new package
|
||||
#. Move the old package to a ``legacy`` repository and create a new package
|
||||
#. Drop older versions that only support the older build system
|
||||
|
||||
Of these options, 1 is preferred, and will be demonstrated in this
|
||||
documentation. Options 3-5 have issues with concretization, so shouldn't be
|
||||
used. Options 4-5 also don't support more than two build systems. Option 6 only
|
||||
works if the old versions are no longer needed. Option 1 is preferred over 2
|
||||
because it makes it easier to drop the old build system entirely.
|
||||
|
||||
The exact syntax of the package depends on which build systems you need to
|
||||
support. Below are a couple of common examples.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Makefile -> Autotools
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Let's say we have the following package:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Foo(MakefilePackage):
|
||||
version("1.2.0", sha256="...")
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
filter_file("CC=", "CC=" + spack_cc, "Makefile")
|
||||
|
||||
def install(self, spec, prefix):
|
||||
install_tree(".", prefix)
|
||||
|
||||
|
||||
The package subclasses from :ref:`makefilepackage`, which has three phases:
|
||||
|
||||
#. ``edit`` (does nothing by default)
|
||||
#. ``build`` (runs ``make`` by default)
|
||||
#. ``install`` (runs ``make install`` by default)
|
||||
|
||||
In this case, the ``install`` phase needed to be overridden because the
|
||||
Makefile did not have an install target. We also modify the Makefile to use
|
||||
Spack's compiler wrappers. The default ``build`` phase is not changed.
|
||||
|
||||
Starting with version 1.3.0, we want to use Autotools to build instead.
|
||||
:ref:`autotoolspackage` has four phases:
|
||||
|
||||
#. ``autoreconf`` (does nothing if a configure script already exists)
|
||||
#. ``configure`` (runs ``./configure --prefix=...`` by default)
|
||||
#. ``build`` (runs ``make`` by default)
|
||||
#. ``install`` (runs ``make install`` by default)
|
||||
|
||||
If the only version we need to support is 1.3.0, the package would look as
|
||||
simple as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Foo(AutotoolsPackage):
|
||||
version("1.3.0", sha256="...")
|
||||
|
||||
def configure_args(self):
|
||||
return ["--enable-shared"]
|
||||
|
||||
|
||||
In this case, we use the default methods for each phase and only override
|
||||
``configure_args`` to specify additional flags to pass to ``./configure``.
|
||||
|
||||
If we wanted to write a single package that supports both versions 1.2.0 and
|
||||
1.3.0, it would look something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Foo(AutotoolsPackage):
|
||||
version("1.3.0", sha256="...")
|
||||
version("1.2.0", sha256="...", deprecated=True)
|
||||
|
||||
def configure_args(self):
|
||||
return ["--enable-shared"]
|
||||
|
||||
# Remove the following once version 1.2.0 is dropped
|
||||
@when("@:1.2")
|
||||
def patch(self):
|
||||
filter_file("CC=", "CC=" + spack_cc, "Makefile")
|
||||
|
||||
@when("@:1.2")
|
||||
def autoreconf(self, spec, prefix):
|
||||
pass
|
||||
|
||||
@when("@:1.2")
|
||||
def configure(self, spec, prefix):
|
||||
pass
|
||||
|
||||
@when("@:1.2")
|
||||
def install(self, spec, prefix):
|
||||
install_tree(".", prefix)
|
||||
|
||||
|
||||
There are a few interesting things to note here:
|
||||
|
||||
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
|
||||
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
|
||||
on version 1.2.0, it's still there and builds just fine.
|
||||
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
|
||||
``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
|
||||
step to be executed is to move it to the ``patch`` function, which always
|
||||
gets run.
|
||||
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
|
||||
Makefile-based build system doesn't use these, we ignore these phases when
|
||||
building ``foo@1.2.0``.
|
||||
* The ``@when`` decorator is used to override these phases only for older
|
||||
versions. The default methods are used for ``foo@1.3:``.
|
||||
|
||||
Once a new Spack release comes out, version 1.2.0 and everything below the
|
||||
comment can be safely deleted. The result is the same as if we had written a
|
||||
package for version 1.3.0 from scratch.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
Autotools -> CMake
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Let's say we have the following package:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Bar(AutotoolsPackage):
|
||||
version("1.2.0", sha256="...")
|
||||
|
||||
def configure_args(self):
|
||||
return ["--enable-shared"]
|
||||
|
||||
|
||||
The package subclasses from :ref:`autotoolspackage`, which has four phases:
|
||||
|
||||
#. ``autoreconf`` (does nothing if a configure script already exists)
|
||||
#. ``configure`` (runs ``./configure --prefix=...`` by default)
|
||||
#. ``build`` (runs ``make`` by default)
|
||||
#. ``install`` (runs ``make install`` by default)
|
||||
|
||||
In this case, we use the default methods for each phase and only override
|
||||
``configure_args`` to specify additional flags to pass to ``./configure``.
|
||||
|
||||
Starting with version 1.3.0, we want to use CMake to build instead.
|
||||
:ref:`cmakepackage` has three phases:
|
||||
|
||||
#. ``cmake`` (runs ``cmake ...`` by default)
|
||||
#. ``build`` (runs ``make`` by default)
|
||||
#. ``install`` (runs ``make install`` by default)
|
||||
|
||||
If the only version we need to support is 1.3.0, the package would look as
|
||||
simple as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Bar(CMakePackage):
|
||||
version("1.3.0", sha256="...")
|
||||
|
||||
def cmake_args(self):
|
||||
return [self.define("BUILD_SHARED_LIBS", True)]
|
||||
|
||||
|
||||
In this case, we use the default methods for each phase and only override
|
||||
``cmake_args`` to specify additional flags to pass to ``cmake``.
|
||||
|
||||
If we wanted to write a single package that supports both versions 1.2.0 and
|
||||
1.3.0, it would look something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Bar(CMakePackage):
|
||||
version("1.3.0", sha256="...")
|
||||
version("1.2.0", sha256="...", deprecated=True)
|
||||
|
||||
def cmake_args(self):
|
||||
return [self.define("BUILD_SHARED_LIBS", True)]
|
||||
|
||||
# Remove the following once version 1.2.0 is dropped
|
||||
def configure_args(self):
|
||||
return ["--enable-shared"]
|
||||
|
||||
@when("@:1.2")
|
||||
def cmake(self, spec, prefix):
|
||||
configure("--prefix=" + prefix, *self.configure_args())
|
||||
|
||||
|
||||
There are a few interesting things to note here:
|
||||
|
||||
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
|
||||
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
|
||||
on version 1.2.0, it's still there and builds just fine.
|
||||
* Since CMake and Autotools are so similar, we only need to override the
``cmake`` phase; we can use the default ``build`` and ``install`` phases.
|
||||
* We override ``cmake`` to run ``./configure`` for older versions.
|
||||
``configure_args`` remains the same.
|
||||
* The ``@when`` decorator is used to override these phases only for older
|
||||
versions. The default methods are used for ``bar@1.3:``.
|
||||
|
||||
Once a new Spack release comes out, version 1.2.0 and everything below the
|
||||
comment can be safely deleted. The result is the same as if we had written a
|
||||
package for version 1.3.0 from scratch.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Multiple build systems for the same version
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
During the transition from one build system to another, developers often
|
||||
support multiple build systems at the same time. Spack can only use a single
|
||||
build system for a single version. To decide which build system to use for a
|
||||
particular version, take the following things into account:
|
||||
|
||||
1. If the developers explicitly state that one build system is preferred over
|
||||
another, use that one.
|
||||
2. If one build system is considered "experimental" while another is considered
|
||||
"stable", use the stable build system.
|
||||
3. Otherwise, use the newer build system.
|
||||
|
||||
The developer preference for which build system to use can change over time as
|
||||
a newer build system becomes stable/recommended.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Dropping support for old build systems
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When older versions of a package don't support a newer build system, it can be
|
||||
tempting to simply delete them from a package. This significantly reduces
|
||||
package complexity and makes the build recipe much easier to maintain. However,
|
||||
other packages or Spack users may rely on these older versions. The recommended
|
||||
approach is to first support both build systems (as demonstrated above),
|
||||
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
|
||||
remove those versions and any phases that needed to be overridden in the next
|
||||
Spack release.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Three or more build systems
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In rare cases, a package may change build systems multiple times. For example,
|
||||
a package may start with Makefiles, then switch to Autotools, then switch to
|
||||
CMake. The same logic used above can be extended to any number of build systems.
|
||||
For example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Baz(CMakePackage):
|
||||
version("1.4.0", sha256="...") # CMake
|
||||
version("1.3.0", sha256="...") # Autotools
|
||||
version("1.2.0", sha256="...") # Makefile
|
||||
|
||||
def cmake_args(self):
|
||||
return [self.define("BUILD_SHARED_LIBS", True)]
|
||||
|
||||
# Remove the following once version 1.3.0 is dropped
|
||||
def configure_args(self):
|
||||
return ["--enable-shared"]
|
||||
|
||||
@when("@1.3")
|
||||
def cmake(self, spec, prefix):
|
||||
configure("--prefix=" + prefix, *self.configure_args())
|
||||
|
||||
# Remove the following once version 1.2.0 is dropped
|
||||
@when("@:1.2")
|
||||
def patch(self):
|
||||
filter_file("CC=", "CC=" + spack_cc, "Makefile")
|
||||
|
||||
@when("@:1.2")
|
||||
def cmake(self, spec, prefix):
|
||||
pass
|
||||
|
||||
@when("@:1.2")
|
||||
def install(self, spec, prefix):
|
||||
install_tree(".", prefix)
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Additional examples
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
When writing new packages, it often helps to see examples of existing packages.
|
||||
Here is an incomplete list of existing Spack packages that have changed build
|
||||
systems before:
|
||||
|
||||
================ ===================== ================
|
||||
Package Previous Build System New Build System
|
||||
================ ===================== ================
|
||||
amber custom CMake
|
||||
arpack-ng Autotools CMake
|
||||
atk Autotools Meson
|
||||
blast None Autotools
|
||||
dyninst Autotools CMake
|
||||
evtgen Autotools CMake
|
||||
fish Autotools CMake
|
||||
gdk-pixbuf Autotools Meson
|
||||
glib Autotools Meson
|
||||
glog Autotools CMake
|
||||
gmt Autotools CMake
|
||||
gtkplus Autotools Meson
|
||||
hpl Makefile Autotools
|
||||
interproscan Perl Maven
|
||||
jasper Autotools CMake
|
||||
kahip SCons CMake
|
||||
kokkos Makefile CMake
|
||||
kokkos-kernels Makefile CMake
|
||||
leveldb Makefile CMake
|
||||
libdrm Autotools Meson
|
||||
libjpeg-turbo Autotools CMake
|
||||
mesa Autotools Meson
|
||||
metis None CMake
|
||||
mpifileutils Autotools CMake
|
||||
muparser Autotools CMake
|
||||
mxnet Makefile CMake
|
||||
nest Autotools CMake
|
||||
neuron Autotools CMake
|
||||
nsimd CMake nsconfig
|
||||
opennurbs Makefile CMake
|
||||
optional-lite None CMake
|
||||
plasma Makefile CMake
|
||||
preseq Makefile Autotools
|
||||
protobuf Autotools CMake
|
||||
py-pygobject Autotools Python
|
||||
singularity Autotools Makefile
|
||||
span-lite None CMake
|
||||
ssht Makefile CMake
|
||||
string-view-lite None CMake
|
||||
superlu Makefile CMake
|
||||
superlu-dist Makefile CMake
|
||||
uncrustify Autotools CMake
|
||||
================ ===================== ================
|
||||
|
||||
Packages that support multiple build systems can be a bit confusing to write.
|
||||
Don't hesitate to open an issue or draft pull request and ask for advice from
|
||||
other Spack developers!
|
@@ -97,15 +97,19 @@ def setup(sphinx):
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
needs_sphinx = '1.8'
|
||||
needs_sphinx = '3.4'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc',
|
||||
'sphinx.ext.graphviz',
|
||||
'sphinx.ext.napoleon',
|
||||
'sphinx.ext.todo',
|
||||
'sphinxcontrib.programoutput']
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.graphviz',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.napoleon',
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinxcontrib.programoutput',
|
||||
]
|
||||
|
||||
# Set default graphviz options
|
||||
graphviz_dot_args = [
|
||||
@@ -164,6 +168,19 @@ def setup(sphinx):
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build', '_spack_root', '.spack-env']
|
||||
|
||||
nitpicky = True
|
||||
nitpick_ignore = [
|
||||
# Python classes that intersphinx is unable to resolve
|
||||
('py:class', 'argparse.HelpFormatter'),
|
||||
('py:class', 'contextlib.contextmanager'),
|
||||
('py:class', 'module'),
|
||||
('py:class', '_io.BufferedReader'),
|
||||
('py:class', 'unittest.case.TestCase'),
|
||||
('py:class', '_frozen_importlib_external.SourceFileLoader'),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
('py:class', 'spack.provider_index._IndexBase'),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
@@ -358,3 +375,11 @@ class SpackStyle(DefaultStyle):
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
|
||||
# -- Extension configuration -------------------------------------------------
|
||||
|
||||
# sphinx.ext.intersphinx
|
||||
intersphinx_mapping = {
|
||||
"python": ("https://docs.python.org/3", None),
|
||||
}
|
||||
|
@@ -108,9 +108,9 @@ with a high level view of Spack's directory structure:
|
||||
|
||||
spack/ <- spack module; contains Python code
|
||||
analyzers/ <- modules to run analysis on installed packages
|
||||
build_systems/ <- modules for different build systems
|
||||
build_systems/ <- modules for different build systems
|
||||
cmd/ <- each file in here is a spack subcommand
|
||||
compilers/ <- compiler description files
|
||||
compilers/ <- compiler description files
|
||||
container/ <- module for spack containerize
|
||||
hooks/ <- hook modules to run at different points
|
||||
modules/ <- modules for lmod, tcl, etc.
|
||||
@@ -151,24 +151,22 @@ Package-related modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:mod:`spack.package`
|
||||
Contains the :class:`Package <spack.package.Package>` class, which
|
||||
Contains the :class:`~spack.package.Package` class, which
|
||||
is the superclass for all packages in Spack. Methods on ``Package``
|
||||
implement all phases of the :ref:`package lifecycle
|
||||
<package-lifecycle>` and manage the build process.
|
||||
|
||||
:mod:`spack.packages`
|
||||
Contains all of the packages in Spack and methods for managing them.
|
||||
Functions like :func:`packages.get <spack.packages.get>` and
|
||||
:func:`class_name_for_package_name
|
||||
<packages.class_name_for_package_name>` handle mapping package module
|
||||
names to class names and dynamically instantiating packages by name
|
||||
from module files.
|
||||
:mod:`spack.util.naming`
|
||||
Contains functions for mapping between Spack package names,
|
||||
Python module names, and Python class names. Functions like
|
||||
:func:`~spack.util.naming.mod_to_class` handle mapping package
|
||||
module names to class names.
|
||||
|
||||
:mod:`spack.relations`
|
||||
*Relations* are relationships between packages, like
|
||||
:func:`depends_on <spack.relations.depends_on>` and :func:`provides
|
||||
<spack.relations.provides>`. See :ref:`dependencies` and
|
||||
:ref:`virtual-dependencies`.
|
||||
:mod:`spack.directives`
|
||||
*Directives* are functions that can be called inside a package definition
|
||||
to modify the package, like :func:`~spack.directives.depends_on`
|
||||
and :func:`~spack.directives.provides`. See :ref:`dependencies`
|
||||
and :ref:`virtual-dependencies`.
|
||||
|
||||
:mod:`spack.multimethod`
|
||||
Implementation of the :func:`@when <spack.multimethod.when>`
|
||||
@@ -180,31 +178,27 @@ Spec-related modules
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:mod:`spack.spec`
|
||||
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser
|
||||
<spack.spec.SpecParser>`. Also implements most of the logic for
|
||||
normalization and concretization of specs.
|
||||
Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
|
||||
Also implements most of the logic for normalization and concretization
|
||||
of specs.
|
||||
|
||||
:mod:`spack.parse`
|
||||
Contains some base classes for implementing simple recursive descent
|
||||
parsers: :class:`Parser <spack.parse.Parser>` and :class:`Lexer
|
||||
<spack.parse.Lexer>`. Used by :class:`SpecParser
|
||||
<spack.spec.SpecParser>`.
|
||||
parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
|
||||
Used by :class:`~spack.spec.SpecParser`.
|
||||
|
||||
:mod:`spack.concretize`
|
||||
Contains :class:`DefaultConcretizer
|
||||
<spack.concretize.DefaultConcretizer>` implementation, which allows
|
||||
site administrators to change Spack's :ref:`concretization-policies`.
|
||||
Contains :class:`~spack.concretize.Concretizer` implementation,
|
||||
which allows site administrators to change Spack's :ref:`concretization-policies`.
|
||||
|
||||
:mod:`spack.version`
|
||||
Implements a simple :class:`Version <spack.version.Version>` class
|
||||
with simple comparison semantics. Also implements
|
||||
:class:`VersionRange <spack.version.VersionRange>` and
|
||||
:class:`VersionList <spack.version.VersionList>`. All three are
|
||||
comparable with each other and offer union and intersection
|
||||
operations. Spack uses these classes to compare versions and to
|
||||
manage version constraints on specs. Comparison semantics are
|
||||
similar to the ``LooseVersion`` class in ``distutils`` and to the
|
||||
way RPM compares version strings.
|
||||
Implements a simple :class:`~spack.version.Version` class with simple
|
||||
comparison semantics. Also implements :class:`~spack.version.VersionRange`
|
||||
and :class:`~spack.version.VersionList`. All three are comparable with each
|
||||
other and offer union and intersection operations. Spack uses these classes
|
||||
to compare versions and to manage version constraints on specs. Comparison
|
||||
semantics are similar to the ``LooseVersion`` class in ``distutils`` and to
|
||||
the way RPM compares version strings.
|
||||
|
||||
:mod:`spack.compilers`
|
||||
Submodules contains descriptors for all valid compilers in Spack.
|
||||
@@ -217,7 +211,7 @@ Spec-related modules
|
||||
yet.
|
||||
|
||||
:mod:`spack.architecture`
|
||||
:func:`architecture.sys_type <spack.architecture.sys_type>` is used
|
||||
:func:`architecture.default_arch <spack.architecture.default_arch>` is used
|
||||
to determine the host architecture while building.
|
||||
|
||||
.. warning::
|
||||
@@ -232,7 +226,7 @@ Build environment
|
||||
:mod:`spack.stage`
|
||||
Handles creating temporary directories for builds.
|
||||
|
||||
:mod:`spack.compilation`
|
||||
:mod:`spack.build_environment`
|
||||
This contains utility functions used by the compiler wrapper script,
|
||||
``cc``.
|
||||
|
||||
@@ -257,22 +251,19 @@ Unit tests
|
||||
Implements Spack's test suite. Add a module and put its name in
|
||||
the test suite in ``__init__.py`` to add more unit tests.
|
||||
|
||||
:mod:`spack.test.mock_packages`
|
||||
This is a fake package hierarchy used to mock up packages for
|
||||
Spack's test suite.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Research and Monitoring Modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:mod:`spack.monitor`
|
||||
Contains :class:`SpackMonitor <spack.monitor.SpackMonitor>`. This is accessed
|
||||
from the ``spack install`` and ``spack analyze`` commands to send build
|
||||
and package metadada up to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
|
||||
Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
|
||||
the ``spack install`` and ``spack analyze`` commands to send build and
|
||||
package metadata up to a `Spack Monitor
|
||||
<https://github.com/spack/spack-monitor>`_ server.
|
||||
|
||||
|
||||
:mod:`spack.analyzers`
|
||||
A module folder with a :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`
|
||||
A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
|
||||
that provides base functions to run, save, and (optionally) upload analysis
|
||||
results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
|
||||
|
||||
@@ -286,7 +277,7 @@ Other Modules
|
||||
tarball URLs.
|
||||
|
||||
:mod:`spack.error`
|
||||
:class:`SpackError <spack.error.SpackError>`, the base class for
|
||||
:class:`~spack.error.SpackError`, the base class for
|
||||
Spack's exception hierarchy.
|
||||
|
||||
:mod:`llnl.util.tty`
|
||||
@@ -335,8 +326,8 @@ Writing analyzers
|
||||
To write an analyzer, you should add a new python file to the
|
||||
analyzers module directory at ``lib/spack/spack/analyzers`` .
|
||||
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
|
||||
to add an analyzer class ``Myanalyzer`` you woul write to
|
||||
``spack/analyzers/myanalyzer.py`` and import and
|
||||
to add an analyzer class ``Myanalyzer`` you would write to
|
||||
``spack/analyzers/myanalyzer.py`` and import and
|
||||
use the base as follows:
|
||||
|
||||
.. code-block:: python
|
||||
@@ -347,7 +338,7 @@ use the base as follows:
|
||||
|
||||
|
||||
Note that the class name is your module file name, all lowercase
|
||||
except for the first capital letter. You can look at other analyzers in
|
||||
except for the first capital letter. You can look at other analyzers in
|
||||
that analyzer directory for examples. The guide here will tell you about the basic functions needed.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -356,13 +347,13 @@ Analyzer Output Directory
|
||||
|
||||
By default, when you run ``spack analyze run`` an analyzer output directory will
|
||||
be created in your spack user directory in your ``$HOME``. The reason we output here
|
||||
is because the install directory might not always be writable.
|
||||
is because the install directory might not always be writable.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
~/.spack/
|
||||
analyzers
|
||||
|
||||
|
||||
Result files will be written here, organized in subfolders in the same structure
|
||||
as the package, with each analyzer owning its own subfolder. For example:
|
||||
|
||||
@@ -380,11 +371,11 @@ as the package, with each analyzer owning it's own subfolder. for example:
|
||||
│ └── spack-analyzer-install-files.json
|
||||
└── libabigail
|
||||
└── lib
|
||||
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
|
||||
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
|
||||
|
||||
|
||||
Notice that for the libabigail analyzer, since results are generated per object,
|
||||
we honor the object's folder in case there are equivalently named files in
|
||||
we honor the object's folder in case there are equivalently named files in
|
||||
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
|
||||
|
||||
|
||||
@@ -426,7 +417,7 @@ and then return the object with a key as the analyzer name. The result data
|
||||
should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
|
||||
and one of ``value`` or ``binary_value``. The install file should be for a relative
|
||||
path, and not the absolute path. For example, let's say we extract a metric called
|
||||
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
|
||||
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
|
||||
We might have data that looks like this:
|
||||
|
||||
.. code-block:: python
|
||||
@@ -482,7 +473,7 @@ Saving Analyzer Results
|
||||
The analyzer will have ``save_result`` called, with the result object generated
|
||||
to save it to the filesystem, and if the user has added the ``--monitor`` flag
|
||||
to upload it to a monitor server. If your result follows an accepted result
|
||||
format and you don't need to parse it further, you don't need to add this
|
||||
format and you don't need to parse it further, you don't need to add this
|
||||
function to your class. However, if your result data is large or otherwise
|
||||
needs additional parsing, you can define it. If you define the function, it
|
||||
is useful to know about the ``output_dir`` property, which you can join
|
||||
@@ -548,7 +539,7 @@ each one (separately) to the monitor:
|
||||
|
||||
Notice that this function, if you define it, requires a result object (generated by
|
||||
``run()``, a monitor (if you want to send), and a boolean ``overwrite`` to be used
|
||||
to check if a result exists first, and not write to it if the result exists and
|
||||
to check if a result exists first, and not write to it if the result exists and
|
||||
overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder
|
||||
yet, you might want to do that here. You should also use ``tty.info`` to give
|
||||
the user a message of "Writing result to $DIRNAME."
|
||||
@@ -616,7 +607,7 @@ types of hooks in the ``__init__.py``, and then python files in that folder
|
||||
can use hook functions. The files are automatically parsed, so if you write
|
||||
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
|
||||
you can then write hook functions in that file that will be automatically detected,
|
||||
and run whenever your hook is called. This section will cover the basic kind
|
||||
and run whenever your hook is called. This section will cover the basic kind
|
||||
of hooks, and how to write them.
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
@@ -624,7 +615,7 @@ Types of Hooks
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
The following hooks are currently implemented to make it easy for you,
|
||||
the developer, to add hooks at different stages of a spack install or similar.
|
||||
the developer, to add hooks at different stages of a spack install or similar.
|
||||
If there is a hook that you would like and is missing, you can propose to add a new one.
|
||||
|
||||
"""""""""""""""""""""
|
||||
@@ -632,9 +623,9 @@ If there is a hook that you would like and is missing, you can propose to add a
|
||||
"""""""""""""""""""""
|
||||
|
||||
A ``pre_install`` hook is run within an install subprocess, directly before
|
||||
the install starts. It expects a single argument of a spec, and is run in
|
||||
the install starts. It expects a single argument of a spec, and is run in
|
||||
a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks
|
||||
as we have defined them here, but rather callback functions associated with
|
||||
as we have defined them here, but rather callback functions associated with
|
||||
a package install.
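As a purely illustrative sketch (the function name and single ``spec`` argument follow
the description above; the body is not an existing Spack hook):

.. code-block:: python

    def pre_install(spec):
        """Illustrative hook body: runs in the install subprocess,
        right before the install of ``spec`` starts."""
        print("about to install {0}".format(spec.name))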
|
||||
|
||||
|
||||
@@ -657,7 +648,7 @@ here.
|
||||
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
|
||||
in the install function of a ``PackageInstaller``,
|
||||
and importantly is not part of a build process, but before it. This is when
|
||||
we have just newly grabbed the task, and are preparing to install. If you
|
||||
we have just newly grabbed the task, and are preparing to install. If you
|
||||
write a hook of this type, you should provide the spec to it.
|
||||
|
||||
.. code-block:: python
|
||||
@@ -666,7 +657,7 @@ write a hook of this type, you should provide the spec to it.
|
||||
"""On start of an install, we want to...
|
||||
"""
|
||||
print('on_install_start')
|
||||
|
||||
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
``on_install_success(spec)``
|
||||
@@ -744,8 +735,8 @@ to trigger after anything is written to a logger. You would add it as follows:
|
||||
post_install = HookRunner('post_install')
|
||||
|
||||
# hooks related to logging
|
||||
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
|
||||
|
||||
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
|
||||
|
||||
|
||||
You then need to decide what arguments my hook would expect. Since this is
|
||||
related to logging, let's say that you want a message and level. That means
|
||||
@@ -775,7 +766,7 @@ In this example, we use it outside of a logger that is already defined:
|
||||
|
||||
This is not to say that this would be the best way to implement an integration
|
||||
with the logger (you'd probably want to write a custom logger, or you could
|
||||
have the hook defined within the logger) but serves as an example of writing a hook.
|
||||
have the hook defined within the logger) but serves as an example of writing a hook.
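As a sketch (the hook name and the ``message``/``level`` arguments come from the
discussion above, not from an existing Spack hook), the corresponding hook function
might look like:

.. code-block:: python

    # lib/spack/spack/hooks/myintegration.py (hypothetical file)
    def post_log_write(message, level):
        """Illustrative hook body: react to a line written to a logger."""
        if level <= 2:
            print("important log line: {0}".format(message))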
|
||||
|
||||
----------
|
||||
Unit tests
|
||||
@@ -785,6 +776,38 @@ Unit tests
|
||||
Unit testing
|
||||
------------
|
||||
|
||||
---------------------
|
||||
Developer environment
|
||||
---------------------
|
||||
|
||||
.. warning::
|
||||
|
||||
This is an experimental feature. It is expected to change and you should
|
||||
not use it in a production environment.
|
||||
|
||||
|
||||
When installing a package, we currently have support to export environment
|
||||
variables to specify adding debug flags to the build. By default, a package
|
||||
install will build without any debug flag. However, if you want to add them,
|
||||
you can export:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
export SPACK_ADD_DEBUG_FLAGS=true
|
||||
spack install zlib
|
||||
|
||||
|
||||
If you want to add custom flags, you should export an additional variable:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
export SPACK_ADD_DEBUG_FLAGS=true
|
||||
export SPACK_DEBUG_FLAGS="-g"
|
||||
spack install zlib
|
||||
|
||||
These environment variables will eventually be integrated into spack so
|
||||
they are set from the command line.
|
||||
|
||||
------------------
|
||||
Developer commands
|
||||
------------------
|
||||
@@ -795,6 +818,29 @@ Developer commands
|
||||
``spack doc``
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
.. _cmd-spack-style:
|
||||
|
||||
^^^^^^^^^^^^^^^
|
||||
``spack style``
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
``spack style`` exists to help the developer check imports and style with
|
||||
mypy, flake8, isort, and (soon) black. To run all style checks, simply do:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack style
|
||||
|
||||
To run automatic fixes for isort you can do:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack style --fix
|
||||
|
||||
You do not need any of these Python packages installed on your system for
|
||||
the checks to work! Spack will bootstrap install them from packages for
|
||||
your use.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
``spack unit-test``
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
@@ -873,7 +919,7 @@ just like you would with the normal ``python`` command.
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
Spack blame is a way to quickly see contributors to packages or files
|
||||
in the spack repository. You should provide a target package name or
|
||||
in the spack repository. You should provide a target package name or
|
||||
file name to the command. Here is an example asking to see contributions
|
||||
for the package "python":
|
||||
|
||||
@@ -883,8 +929,8 @@ for the package "python":
|
||||
LAST_COMMIT LINES % AUTHOR EMAIL
|
||||
2 weeks ago 3 0.3 Mickey Mouse <cheddar@gmouse.org>
|
||||
a month ago 927 99.7 Minnie Mouse <swiss@mouse.org>
|
||||
|
||||
2 weeks ago 930 100.0
|
||||
|
||||
2 weeks ago 930 100.0
|
||||
|
||||
|
||||
By default, you will get a table view (shown above) sorted by date of contribution,
|
||||
@@ -1255,7 +1301,7 @@ Publishing a release on GitHub
|
||||
|
||||
#. Create the release in GitHub.
|
||||
|
||||
* Go to
|
||||
* Go to
|
||||
`github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_
|
||||
and click ``Draft a new release``.
|
||||
|
||||
|
@@ -732,13 +732,17 @@ Configuring environment views
|
||||
The Spack Environment manifest file has a top-level keyword
|
||||
``view``. Each entry under that heading is a view descriptor, headed
|
||||
by a name. The view descriptor contains the root of the view, and
|
||||
optionally the projections for the view, and ``select`` and
|
||||
``exclude`` lists for the view. For example, in the following manifest
|
||||
optionally the projections for the view, ``select`` and
|
||||
``exclude`` lists for the view and link information via ``link`` and
|
||||
``link_type``. For example, in the following manifest
|
||||
file snippet we define a view named ``mpis``, rooted at
|
||||
``/path/to/view`` in which all projections use the package name,
|
||||
version, and compiler name to determine the path for a given
|
||||
package. This view selects all packages that depend on MPI, and
|
||||
excludes those built with the PGI compiler at version 18.5.
|
||||
All the dependencies of each root spec in the environment will be linked
|
||||
in the view due to the command ``link: all`` and the files in the view will
|
||||
be symlinks to the spack install directories.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -751,11 +755,16 @@ excludes those built with the PGI compiler at version 18.5.
|
||||
exclude: ['%pgi@18.5']
|
||||
projections:
|
||||
all: {name}/{version}-{compiler.name}
|
||||
link: all
|
||||
link_type: symlink
|
||||
|
||||
For more information on using view projections, see the section on
|
||||
:ref:`adding_projections_to_views`. The default for the ``select`` and
|
||||
``exclude`` values is to select everything and exclude nothing. The
|
||||
default projection is the default view projection (``{}``).
|
||||
default projection is the default view projection (``{}``). The ``link``
|
||||
defaults to ``all`` but can also be ``roots`` when only the root specs
|
||||
in the environment are desired in the view. The ``link_type`` defaults
|
||||
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.
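For instance, a minimal sketch of a second view that links only the root specs using
hardlinks (the root path is a placeholder) could be:

.. code-block:: yaml

   spack:
     # ...
     view:
       roots-only:
         root: /path/to/roots-view
         link: roots
         link_type: hardlink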
|
||||
|
||||
Any number of views may be defined under the ``view`` heading in a
|
||||
Spack Environment.
|
||||
|
@@ -9,21 +9,16 @@
|
||||
Getting Started
|
||||
===============
|
||||
|
||||
-------------
|
||||
Prerequisites
|
||||
-------------
|
||||
--------------------
|
||||
System Prerequisites
|
||||
--------------------
|
||||
|
||||
Spack has the following minimum requirements, which must be installed
|
||||
before Spack is run:
|
||||
Spack has the following minimum system requirements, which are assumed to
|
||||
be present on the machine where Spack is run:
|
||||
|
||||
#. Python 2 (2.6 or 2.7) or 3 (3.5 - 3.9) to run Spack
|
||||
#. A C/C++ compiler for building
|
||||
#. The ``make`` executable for building
|
||||
#. The ``tar``, ``gzip``, ``unzip``, ``bzip2``, ``xz`` and optionally ``zstd``
|
||||
executables for extracting source code
|
||||
#. The ``patch`` command to apply patches
|
||||
#. The ``git`` and ``curl`` commands for fetching
|
||||
#. If using the ``gpg`` subcommand, ``gnupg2`` is required
|
||||
.. csv-table:: System prerequisites for Spack
|
||||
:file: tables/system_prerequisites.csv
|
||||
:header-rows: 1
|
||||
|
||||
These requirements can be easily installed on most modern Linux systems;
|
||||
on macOS, XCode is required. Spack is designed to run on HPC
|
||||
@@ -89,6 +84,151 @@ sourcing time, ensuring future invocations of the ``spack`` command will
|
||||
continue to use the same consistent python version regardless of changes in
|
||||
the environment.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Bootstrapping clingo
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack supports using ``clingo`` as an external solver to compute which software
|
||||
needs to be installed. The default configuration allows Spack to install
|
||||
``clingo`` from a public buildcache, created by a Github Action workflow. In this
|
||||
case the bootstrapping procedure is transparent to the user, except for a
|
||||
slightly long waiting time on the first concretization of a spec:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find -b
|
||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
||||
==> 0 installed packages
|
||||
|
||||
$ time spack solve zlib
|
||||
==> Best of 2 considered solutions.
|
||||
==> Optimization Criteria:
|
||||
Priority Criterion Value
|
||||
1 deprecated versions used 0
|
||||
2 version weight 0
|
||||
3 number of non-default variants (roots) 0
|
||||
4 multi-valued variants 0
|
||||
5 preferred providers for roots 0
|
||||
6 number of non-default variants (non-roots) 0
|
||||
7 preferred providers (non-roots) 0
|
||||
8 compiler mismatches 0
|
||||
9 version badness 0
|
||||
10 count of non-root multi-valued variants 0
|
||||
11 non-preferred compilers 0
|
||||
12 target mismatches 0
|
||||
13 non-preferred targets 0
|
||||
|
||||
zlib@1.2.11%gcc@11.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
|
||||
|
||||
real 0m30,618s
|
||||
user 0m27,278s
|
||||
sys 0m1,549s
|
||||
|
||||
After this command you'll see that ``clingo`` has been installed for Spack's own use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find -b
|
||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
||||
==> 2 installed packages
|
||||
-- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
|
||||
clingo-bootstrap@spack python@3.6
|
||||
|
||||
Subsequent calls to the concretizer will then be much faster:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ time spack solve zlib
|
||||
[ ... ]
|
||||
real 0m1,222s
|
||||
user 0m1,146s
|
||||
sys 0m0,059s
|
||||
|
||||
If for security or for other reasons you don't want to or can't install precompiled
|
||||
binaries, Spack can fall back to bootstrapping ``clingo`` from source files. To forbid
|
||||
Spack from retrieving binaries from the bootstrapping buildcache, the following
|
||||
command must be given:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap untrust github-actions
|
||||
==> "github-actions" is now untrusted and will not be used for bootstrapping
|
||||
|
||||
since an "untrusted" way of bootstrapping software will not be considered
|
||||
by Spack. You can verify the new settings are effective with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap list
|
||||
Name: github-actions UNTRUSTED
|
||||
|
||||
Type: buildcache
|
||||
|
||||
Info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
|
||||
Description:
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
|
||||
|
||||
Name: spack-install TRUSTED
|
||||
|
||||
Type: install
|
||||
|
||||
Description:
|
||||
Specs built from sources by Spack. May take a long time.
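If you later want to re-enable the buildcache source, it can presumably be trusted
again with the matching subcommand:

.. code-block:: console

   $ spack bootstrap trust github-actions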
|
||||
|
||||
When bootstrapping from sources, Spack requires a compiler with support
|
||||
for C++14 (GCC on ``linux``, Apple Clang on ``darwin``) and static C++
|
||||
standard libraries on ``linux``. Spack will build the required software
|
||||
on the first request to concretize a spec:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack solve zlib
|
||||
[+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
|
||||
[+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
|
||||
[+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
|
||||
==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
|
||||
[ ... ]
|
||||
==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
|
||||
zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
|
||||
|
||||
.. tip::
|
||||
|
||||
If you want to speed-up bootstrapping ``clingo`` from sources, you may try to
|
||||
search for ``cmake`` and ``bison`` on your system:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack external find cmake bison
|
||||
==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
|
||||
bison@3.0.4 cmake@3.19.4
|
||||
|
||||
"""""""""""""""""""
|
||||
The Bootstrap Store
|
||||
"""""""""""""""""""
|
||||
|
||||
All the tools Spack needs for its own functioning are installed in a separate store, which lives
|
||||
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find --bootstrap
|
||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
||||
==> 3 installed packages
|
||||
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
|
||||
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
|
||||
|
||||
In case it's needed the bootstrap store can also be cleaned with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack clean -b
|
||||
==> Removing software in "/home/spack/.spack/bootstrap/store"
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
Check Installation
|
||||
@@ -117,53 +257,6 @@ environment*, especially for ``PATH``. Only software that comes with
|
||||
the system, or that you know you wish to use with Spack, should be
|
||||
included. This procedure will avoid many strange build errors.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Optional: Bootstrapping clingo
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack supports using clingo as an external solver to compute which software
|
||||
needs to be installed. If you have a default compiler supporting C++14 Spack
|
||||
can automatically bootstrap this tool from sources the first time it is
|
||||
needed:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack solve zlib
|
||||
[+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
|
||||
[+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
|
||||
[+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
|
||||
==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
|
||||
[ ... ]
|
||||
==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
|
||||
zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
|
||||
|
||||
If you want to speed-up bootstrapping, you may try to search for ``cmake`` and ``bison``
|
||||
on your system:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack external find cmake bison
|
||||
==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
|
||||
bison@3.0.4 cmake@3.19.4
|
||||
|
||||
All the tools Spack needs for its own functioning are installed in a separate store, which lives
|
||||
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find --bootstrap
|
||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
||||
==> 3 installed packages
|
||||
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
|
||||
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
|
||||
|
||||
In case it's needed the bootstrap store can also be cleaned with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack clean -b
|
||||
==> Removing software in "/home/spack/.spack/bootstrap/store"
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Optional: Alternate Prefix
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -367,6 +460,34 @@ then inject those flags into the compiler command. Compiler flags
|
||||
entered from the command line will be discussed in more detail in the
|
||||
following section.
|
||||
|
||||
Some compilers also require additional environment configuration.
|
||||
Examples include Intel's oneAPI and AMD's AOCC compiler suites,
|
||||
which have custom scripts for loading environment variables and setting paths.
|
||||
These variables should be specified in the ``environment`` section of the compiler
|
||||
specification. The operations available to modify the environment are ``set``, ``unset``,
|
||||
``prepend_path``, ``append_path``, and ``remove_path``. For example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
modules: []
|
||||
operating_system: centos6
|
||||
paths:
|
||||
cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
|
||||
cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
|
||||
f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||
fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
|
||||
spec: oneapi@latest
|
||||
environment:
|
||||
set:
|
||||
MKL_ROOT: "/path/to/mkl/root"
|
||||
unset: # A list of environment variables to unset
|
||||
- CC
|
||||
prepend_path: # Similar for append|remove_path
|
||||
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build Your Own Compiler
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -521,8 +642,9 @@ Fortran.
|
||||
#. Run ``spack compiler find`` to locate Clang.
|
||||
|
||||
#. There are different ways to get ``gfortran`` on macOS. For example, you can
|
||||
install GCC with Spack (``spack install gcc``) or with Homebrew
|
||||
(``brew install gcc``).
|
||||
install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
|
||||
gcc``), or from a `DMG installer
|
||||
<https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.
|
||||
|
||||
#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
|
||||
the path to ``gfortran``:
|
||||
@@ -543,7 +665,8 @@ Fortran.
|
||||
If you used Spack to install GCC, you can get the installation prefix by
|
||||
``spack location -i gcc`` (this will only work if you have a single version
|
||||
of GCC installed). Whereas for Homebrew, GCC is installed in
|
||||
``/usr/local/Cellar/gcc/x.y.z``.
|
||||
``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
|
||||
will be ``/usr/local/gfortran``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Compiler Verification
|
||||
@@ -777,7 +900,7 @@ an OpenMPI installed in /opt/local, one would use:
|
||||
buildable: False
|
||||
|
||||
In general, Spack is easier to use and more reliable if it builds all of
|
||||
its own dependencies. However, there are two packages for which one
|
||||
its own dependencies. However, there are several packages for which one
|
||||
commonly needs to use system versions:
|
||||
|
||||
^^^
|
||||
|
@@ -612,6 +612,7 @@ it executable, then runs it with some arguments.
|
||||
installer = Executable(self.stage.archive_file)
|
||||
installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')
|
||||
|
||||
.. _deprecate:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Deprecating old versions
|
||||
@@ -2884,52 +2885,52 @@ The package base class, usually specialized for a given build system, determines
|
||||
actual set of entities available for overriding.
|
||||
The classes that are currently provided by Spack are:
|
||||
|
||||
+-------------------------------+----------------------------------+
|
||||
| **Base Class** | **Purpose** |
|
||||
+===============================+==================================+
|
||||
| :py:class:`.Package` | General base class not |
|
||||
| | specialized for any build system |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.MakefilePackage` | Specialized class for packages |
|
||||
| | built invoking |
|
||||
| | hand-written Makefiles |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.AutotoolsPackage` | Specialized class for packages |
|
||||
| | built using GNU Autotools |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.CMakePackage` | Specialized class for packages |
|
||||
| | built using CMake |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.CudaPackage` | A helper class for packages that |
|
||||
| | use CUDA |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.QMakePackage` | Specialized class for packages |
|
||||
| | build using QMake |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.ROCmPackage` | A helper class for packages that |
|
||||
| | use ROCm |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.SConsPackage` | Specialized class for packages |
|
||||
| | built using SCons |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.WafPackage` | Specialized class for packages |
|
||||
| | built using Waf |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.RPackage` | Specialized class for |
|
||||
| | :py:class:`.R` extensions |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.OctavePackage` | Specialized class for |
|
||||
| | :py:class:`.Octave` packages |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.PythonPackage` | Specialized class for |
|
||||
| | :py:class:`.Python` extensions |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.PerlPackage` | Specialized class for |
|
||||
| | :py:class:`.Perl` extensions |
|
||||
+-------------------------------+----------------------------------+
|
||||
| :py:class:`.IntelPackage` | Specialized class for licensed |
|
||||
| | Intel software |
|
||||
+-------------------------------+----------------------------------+
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| **Base Class** | **Purpose** |
|
||||
+==========================================================+==================================+
|
||||
| :class:`~spack.package.Package` | General base class not |
|
||||
| | specialized for any build system |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
|
||||
| | built invoking |
|
||||
| | hand-written Makefiles |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages |
|
||||
| | built using GNU Autotools |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages |
|
||||
| | built using CMake |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
|
||||
| | use CUDA |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages |
|
||||
| | built using QMake |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
|
||||
| | use ROCm |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages |
|
||||
| | built using SCons |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages |
|
||||
| | built using Waf |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.r.RPackage` | Specialized class for |
|
||||
| | R extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for |
|
||||
| | Octave packages |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.python.PythonPackage` | Specialized class for |
|
||||
| | Python extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for |
|
||||
| | Perl extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed |
|
||||
| | Intel software |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
|
||||
|
||||
.. note::
|
||||
@@ -2939,7 +2940,7 @@ The classes that are currently provided by Spack are:
|
||||
rare cases where manual intervention is needed, we need to stress that a
|
||||
package base class depends on the *build system* being used, not the language of the package.
|
||||
For example, a Python extension installed with CMake would ``extends('python')`` and
|
||||
subclass from :py:class:`.CMakePackage`.
|
||||
subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
|
||||
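A minimal sketch of that case (the package name and CMake option below are hypothetical; only ``extends('python')`` and the ``CMakePackage`` base class come from the note above):

.. code-block:: python

   class PyFoo(CMakePackage):
       """Hypothetical Python extension built with CMake."""

       extends('python')
       depends_on('cmake', type='build')

       def cmake_args(self):
           # Point the CMake build at the Python interpreter provided by Spack.
           return [
               '-DPYTHON_EXECUTABLE={0}'.format(self.spec['python'].command.path)
           ]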
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Installation pipeline
|
||||
@@ -4079,7 +4080,7 @@ prefix **before** ``make install``. Builds like this can falsely report
|
||||
success when an error occurs before the installation is complete. Simple
|
||||
sanity checks can be used to identify files and/or directories that are
|
||||
required of a successful installation. Spack checks for the presence of
|
||||
the files and directories after ``install()`` runs.
|
||||
the files and directories after ``install()`` runs.
|
||||
|
||||
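A minimal sketch of such a check, using the ``sanity_check_is_file`` and ``sanity_check_is_dir`` package attributes (assumed here to be the mechanism this passage describes; the listed paths are hypothetical and are interpreted relative to the install prefix):

.. code-block:: python

   class Libfoo(Package):
       """Hypothetical package with post-install sanity checks."""

       # Checked for existence after ``install()`` runs.
       sanity_check_is_file = ['include/foo.h']
       sanity_check_is_dir = ['lib', 'share/foo']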
If any of the listed files or directories are missing, then the build will
|
||||
fail and the install prefix will be removed. If they all exist, then Spack
|
||||
@@ -4193,7 +4194,7 @@ need to use two decorators for each phase test method:
|
||||
The first decorator tells Spack when in the installation process to
|
||||
run your test method; namely *after* the provided
|
||||
installation phase. The second decorator tells Spack to only run the
|
||||
checks when the ``--test`` option is provided on the command line.
|
||||
checks when the ``--test`` option is provided on the command line.
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -4267,17 +4268,17 @@ tests can be performed days, even weeks, after the software is installed.
|
||||
|
||||
Stand-alone tests are checks that should run relatively quickly -- as
|
||||
in on the order of at most a few minutes -- and ideally execute all
|
||||
aspects of the installed software, or at least key functionality.
|
||||
aspects of the installed software, or at least key functionality.
|
||||
|
||||
.. note::
|
||||
|
||||
Execution speed is important because these tests are intended
|
||||
to quickly assess whether the installed software works on the
|
||||
system.
|
||||
|
||||
|
||||
Failing stand-alone tests indicate that there is no reason to
|
||||
proceed with more resource-intensive tests.
|
||||
|
||||
|
||||
Passing stand-alone (or smoke) tests can lead to more thorough
|
||||
testing, such as extensive unit or regression tests, or tests
|
||||
that run at scale. Spack support for more thorough testing is
|
||||
@@ -4307,7 +4308,7 @@ file such that:
|
||||
test_stage: /path/to/stage
|
||||
|
||||
The package can access this path **during test processing** using
|
||||
`self.test_suite.stage`.
|
||||
`self.test_suite.stage`.
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -4367,9 +4368,9 @@ The signature for ``cache_extra_test_sources`` is:
|
||||
|
||||
where ``srcs`` is a string or a list of strings corresponding to
|
||||
the paths for the files and/or subdirectories, relative to the staged
|
||||
source, that are to be copied to the corresponding path relative to
|
||||
``self.install_test_root``. All of the contents within each subdirectory
|
||||
will be also be copied.
|
||||
source, that are to be copied to the corresponding relative test path
|
||||
under the prefix. All of the contents within each subdirectory will
|
||||
also be copied.
|
||||
|
||||
For example, a package method for copying everything in the ``tests``
|
||||
subdirectory plus the ``foo.c`` and ``bar.c`` files from ``examples``
|
||||
@@ -4377,8 +4378,13 @@ can be implemented as shown below.
|
||||
|
||||
.. note::
|
||||
|
||||
The ``run_after`` directive ensures associated files are copied
|
||||
**after** the package is installed by the build process.
|
||||
The method name ``copy_test_sources`` here is for illustration
|
||||
purposes. You are free to use a name that is more suited to your
|
||||
package.
|
||||
|
||||
The key to copying the files at build time for stand-alone testing
|
||||
is use of the ``run_after`` directive, which ensures the associated
|
||||
files are copied **after** the provided build stage.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@@ -4388,25 +4394,20 @@ can be implemented as shown below.
|
||||
@run_after('install')
|
||||
def copy_test_sources(self):
|
||||
srcs = ['tests',
|
||||
join_path('examples', 'foo.c'),
|
||||
join_path('examples', 'foo.c'),
|
||||
join_path('examples', 'bar.c')]
|
||||
self.cache_extra_test_sources(srcs)
|
||||
|
||||
In this case, the method copies the associated files from the build
|
||||
stage **after** the software is installed to the package's metadata
|
||||
directory. The result is the directory and files will be cached in
|
||||
paths under ``self.install_test_root`` as follows:
|
||||
|
||||
* ``join_path(self.install_test_root, 'tests')`` along with its files
|
||||
and subdirectories
|
||||
* ``join_path(self.install_test_root, 'examples', 'foo.c')``
|
||||
* ``join_path(self.install_test_root, 'examples', 'bar.c')``
|
||||
a special test subdirectory under the installation prefix.
|
||||
|
||||
These paths are **automatically copied** to the test stage directory
|
||||
where they are available to the package's ``test`` method through the
|
||||
``self.test_suite.current_test_cache_dir`` property. In our example,
|
||||
the method can access the directory and files using the following
|
||||
paths:
|
||||
during stand-alone testing. The package's ``test`` method can access
|
||||
them using the ``self.test_suite.current_test_cache_dir`` property.
|
||||
In our example, the method would use the following paths to reference
|
||||
the copy of each entry listed in ``srcs``, respectively:
|
||||
|
||||
* ``join_path(self.test_suite.current_test_cache_dir, 'tests')``
|
||||
* ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
|
||||
@@ -4414,9 +4415,8 @@ paths:
|
||||
|
||||
.. note::
|
||||
|
||||
Library developers will want to build the associated tests under
|
||||
the ``self.test_suite.current_test_cache_dir`` and against their
|
||||
**installed** libraries before running them.
|
||||
Library developers will want to build the associated tests
|
||||
against their **installed** libraries before running them.
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -4426,11 +4426,6 @@ paths:
|
||||
would be appropriate for ensuring the installed software continues
|
||||
to work as the underlying system evolves.
|
||||
|
||||
.. note::
|
||||
|
||||
You are free to use a method name that is more suitable for
|
||||
your package.
|
||||
|
||||
.. _cache_custom_files:
|
||||
|
||||
"""""""""""""""""""
|
||||
@@ -4446,7 +4441,7 @@ Examples include:
|
||||
- expected test output
|
||||
|
||||
These extra files should be added to the ``test`` subdirectory of the
|
||||
package in the Spack repository.
|
||||
package in the Spack repository.
|
||||
|
||||
Spack will **automatically copy** the contents of that directory to the
|
||||
test staging directory for stand-alone testing. The ``test`` method can
|
||||
@@ -4471,7 +4466,7 @@ The signature for ``get_escaped_text_output`` is:
|
||||
|
||||
where ``filename`` is the path to the file containing the expected output.
|
||||
|
||||
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
|
||||
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
|
||||
accessed and used as illustrated by a simplified version of an ``sqlite``
|
||||
package check:
|
||||
|
||||
@@ -4509,7 +4504,8 @@ can retrieve the expected output from ``examples/foo.out`` using:
|
||||
|
||||
def test(self):
|
||||
..
|
||||
filename = join_path(self.install_test_root, 'examples', 'foo.out')
|
||||
filename = join_path(self.test_suite.current_test_cache_dir,
|
||||
'examples', 'foo.out')
|
||||
expected = get_escaped_text_output(filename)
|
||||
..
|
||||
|
||||
@@ -4591,10 +4587,10 @@ where each argument has the following meaning:
|
||||
|
||||
Options are a list of strings to be passed to the executable when
|
||||
it runs.
|
||||
|
||||
|
||||
The default is ``[]``, which means no options are provided to the
|
||||
executable.
|
||||
|
||||
|
||||
* ``expected`` is an optional list of expected output strings.
|
||||
|
||||
Spack requires every string in ``expected`` to be a regex matching
|
||||
@@ -4605,31 +4601,31 @@ where each argument has the following meaning:
|
||||
|
||||
The expected output can be :ref:`read from a file
|
||||
<expected_test_output_from_file>`.
|
||||
|
||||
|
||||
The default is ``expected=[]``, so Spack will not check the output.
|
||||
|
||||
|
||||
* ``status`` is the optional expected return code(s).
|
||||
|
||||
A list of return codes corresponding to successful execution can
|
||||
be provided (e.g., ``status=[0,3,7]``). Support for non-zero return
|
||||
codes allows for basic **expected failure** tests as well as different
|
||||
return codes across versions of the software.
|
||||
|
||||
|
||||
The default is ``status=[0]``, which corresponds to **successful**
|
||||
execution in the sense that the executable does not exit with a
|
||||
failure code or raise an exception.
|
||||
|
||||
|
||||
* ``installed`` is used to require ``exe`` to be within the package
|
||||
prefix.
|
||||
|
||||
|
||||
If ``True``, then the path for ``exe`` is required to be within the
|
||||
package prefix; otherwise, the path is not constrained.
|
||||
|
||||
|
||||
The default is ``False``, so the fully qualified path for ``exe``
|
||||
does **not** need to be within the installation directory.
|
||||
|
||||
|
||||
* ``purpose`` is an optional heading describing the test part.
|
||||
|
||||
|
||||
Output from the test is written to a test log file, so this argument
|
||||
serves as a searchable heading in text logs to highlight the start
|
||||
of the test part. Having a description can be helpful when debugging
|
||||
@@ -4644,10 +4640,10 @@ where each argument has the following meaning:
|
||||
|
||||
The default is ``False``, which means the test executable must be
|
||||
present for any installable version of the software.
|
||||
|
||||
|
||||
* ``work_dir`` is the path to the directory from which the executable
|
||||
will run.
|
||||
|
||||
|
||||
The default of ``None`` corresponds to the current directory (``'.'``).
|
||||
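Putting these arguments together, a hypothetical ``test`` method might call Spack's ``run_test`` helper (assumed here to be the method these arguments describe) as follows; the executable name, options, and expected output are illustrative only:

.. code-block:: python

   def test(self):
       self.run_test('hello',
                     options=['--greet', 'world'],
                     expected=[r'Hello, world'],
                     status=[0],
                     installed=True,
                     purpose='test: run the installed hello example',
                     work_dir=self.test_suite.current_test_cache_dir)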
|
||||
"""""""""""""""""""""""""""""""""""""""""
|
||||
@@ -4677,9 +4673,6 @@ directory paths are provided in the table below.
|
||||
* - Test Suite Stage Files
|
||||
- ``self.test_suite.stage``
|
||||
- ``join_path(self.test_suite.stage, 'results.txt')``
|
||||
* - Cached Build-time Files
|
||||
- ``self.install_test_root``
|
||||
- ``join_path(self.install_test_root, 'examples', 'foo.c')``
|
||||
* - Staged Cached Build-time Files
|
||||
- ``self.test_suite.current_test_cache_dir``
|
||||
- ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
|
||||
@@ -4754,7 +4747,7 @@ where only the outputs for the first of each set are shown:
|
||||
Copyright (C) 2018 Free Software Foundation, Inc.
|
||||
This is free software; see the source for copying conditions. There is NO
|
||||
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
PASSED
|
||||
...
|
||||
==> [2021-04-26-17:35:20.493921] test: checking mpirun output
|
||||
@@ -4915,7 +4908,7 @@ This is already part of the boilerplate for packages created with
|
||||
Filtering functions
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>`
|
||||
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <llnl.util.filesystem.filter_file>`
|
||||
Works like ``sed`` but with Python regular expression syntax. Takes
|
||||
a regular expression, a replacement, and a set of files. ``repl``
|
||||
can be a raw string or a callable function. If it is a raw string,
|
||||
@@ -4953,7 +4946,7 @@ Filtering functions
|
||||
filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx,
|
||||
prefix.bin.mpicxx)
|
||||
|
||||
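Since ``repl`` may also be a callable, the replacement can be computed from the match object, mirroring ``re.sub``. A small illustrative sketch (the file and pattern are hypothetical):

.. code-block:: python

   filter_file(r'^(CC\s*=).*$',
               lambda match: '{0} {1}'.format(match.group(1), self.compiler.cc),
               'Makefile')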
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <spack.change_sed_delim>`
|
||||
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <llnl.util.filesystem.change_sed_delimiter>`
|
||||
Some packages, like TAU, have a build system that can't install
|
||||
into directories with, e.g. '@' in the name, because they use
|
||||
hard-coded ``sed`` commands in their build.
|
||||
@@ -4975,14 +4968,14 @@ Filtering functions
|
||||
File functions
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
:py:func:`ancestor(dir, n=1) <spack.ancestor>`
|
||||
:py:func:`ancestor(dir, n=1) <llnl.util.filesystem.ancestor>`
|
||||
Get the n\ :sup:`th` ancestor of the directory ``dir``.
|
||||
|
||||
:py:func:`can_access(path) <spack.can_access>`
|
||||
:py:func:`can_access(path) <llnl.util.filesystem.can_access>`
|
||||
True if we can read and write to the file at ``path``. Same as
|
||||
native python ``os.access(file_name, os.R_OK|os.W_OK)``.
|
||||
|
||||
:py:func:`install(src, dest) <spack.install>`
|
||||
:py:func:`install(src, dest) <llnl.util.filesystem.install>`
|
||||
Install a file to a particular location. For example, install a
|
||||
header into the ``include`` directory under the install ``prefix``:
|
||||
|
||||
@@ -4990,14 +4983,14 @@ File functions
|
||||
|
||||
install('my-header.h', prefix.include)
|
||||
|
||||
:py:func:`join_path(*paths) <spack.join_path>`
|
||||
:py:func:`join_path(*paths) <llnl.util.filesystem.join_path>`
|
||||
An alias for ``os.path.join``. This joins paths using the OS path separator.
|
||||
|
||||
:py:func:`mkdirp(*paths) <spack.mkdirp>`
|
||||
:py:func:`mkdirp(*paths) <llnl.util.filesystem.mkdirp>`
|
||||
Create each of the directories in ``paths``, creating any parent
|
||||
directories if they do not exist.
|
||||
|
||||
:py:func:`working_dir(dirname, kwargs) <spack.working_dir>`
|
||||
:py:func:`working_dir(dirname, kwargs) <llnl.util.filesystem.working_dir>`
|
||||
This is a Python `Context Manager
|
||||
<https://docs.python.org/2/library/contextlib.html>`_ that makes it
|
||||
easier to work with subdirectories in builds. You use this with the
|
||||
@@ -5039,7 +5032,7 @@ File functions
|
||||
The ``create=True`` keyword argument causes the command to create
|
||||
the directory if it does not exist.
|
||||
|
||||
:py:func:`touch(path) <spack.touch>`
|
||||
:py:func:`touch(path) <llnl.util.filesystem.touch>`
|
||||
Create an empty file at ``path``.
|
||||
|
||||
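A short illustrative sketch (not taken from the original text) combining a few of the helpers above inside a package's ``install`` method:

.. code-block:: python

   def install(self, spec, prefix):
       mkdirp(prefix.share.doc)
       install('README.md', prefix.share.doc)   # hypothetical file name
       with working_dir('build', create=True):
           # build steps would run from the newly created subdirectory
           touch('.built')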
.. _make-package-findable:
|
||||
|
@@ -1,7 +1,7 @@
|
||||
# These dependencies should be installed using pip in order
|
||||
# to build the documentation.
|
||||
|
||||
sphinx
|
||||
sphinx>=3.4,!=4.1.2
|
||||
sphinxcontrib-programoutput
|
||||
sphinx-rtd-theme
|
||||
python-levenshtein
|
||||
|
@@ -8,12 +8,20 @@
|
||||
# these commands in this directory to install Sphinx and its plugins,
|
||||
# then build the docs:
|
||||
#
|
||||
# spack install
|
||||
# spack env activate .
|
||||
# spack install
|
||||
# make
|
||||
#
|
||||
spack:
|
||||
specs:
|
||||
- py-sphinx
|
||||
# Sphinx
|
||||
- "py-sphinx@3.4:4.1.1,4.1.3:"
|
||||
- py-sphinxcontrib-programoutput
|
||||
- py-sphinx-rtd-theme
|
||||
# VCS
|
||||
- git
|
||||
- mercurial
|
||||
- subversion
|
||||
# Plotting
|
||||
- graphviz
|
||||
concretization: together
|
||||
|
17
lib/spack/docs/tables/system_prerequisites.csv
Normal file
@@ -0,0 +1,17 @@
|
||||
Name, Supported Versions, Notes, Requirement Reason
|
||||
Python, 2.6/2.7/3.5-3.9, , Interpreter for Spack
|
||||
C/C++ Compilers, , , Building software
|
||||
make, , , Build software
|
||||
patch, , , Build software
|
||||
bash, , , Compiler wrappers
|
||||
tar, , , Extract/create archives
|
||||
gzip, , , Compress/Decompress archives
|
||||
unzip, , , Compress/Decompress archives
|
||||
bzip, , , Compress/Decompress archives
|
||||
xz, , , Compress/Decompress archives
|
||||
zstd, , Optional, Compress/Decompress archives
|
||||
file, , , Create/Use Buildcaches
|
||||
gnupg2, , , Sign/Verify Buildcaches
|
||||
git, , , Manage Software Repositories
|
||||
svn, , Optional, Manage Software Repositories
|
||||
hg, , Optional, Manage Software Repositories
|
|
@@ -387,7 +387,7 @@ some nice features:
|
||||
Spack-built compiler can be given to an IDE without requiring the
|
||||
IDE to load that compiler's module.
|
||||
|
||||
Unfortunately, Spack's RPATH support does not work in all case. For example:
|
||||
Unfortunately, Spack's RPATH support does not work in every case. For example:
|
||||
|
||||
#. Software comes in many forms --- not just compiled ELF binaries,
|
||||
but also as interpreted code in Python, R, JVM bytecode, etc.
|
||||
|
49
lib/spack/env/cc
vendored
@@ -40,6 +40,14 @@ parameters=(
|
||||
SPACK_SYSTEM_DIRS
|
||||
)
|
||||
|
||||
# Optional parameters that aren't required to be set
|
||||
|
||||
# Boolean (true/false/custom) if we want to add debug flags
|
||||
# SPACK_ADD_DEBUG_FLAGS
|
||||
|
||||
# If a custom flag is requested, it will be defined
|
||||
# SPACK_DEBUG_FLAGS
|
||||
|
||||
# The compiler input variables are checked for sanity later:
|
||||
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
|
||||
# The default compiler flags are passed from these variables:
|
||||
@@ -87,6 +95,25 @@ for param in "${parameters[@]}"; do
|
||||
fi
|
||||
done
|
||||
|
||||
# Check if optional parameters are defined
|
||||
# If we aren't asking for debug flags, don't add them
|
||||
if [[ -z ${SPACK_ADD_DEBUG_FLAGS+x} ]]; then
|
||||
SPACK_ADD_DEBUG_FLAGS="false"
|
||||
fi
|
||||
|
||||
# SPACK_ADD_DEBUG_FLAGS must be true/false/custom
|
||||
is_valid="false"
|
||||
for param in "true" "false" "custom"; do
|
||||
if [ "$param" == "$SPACK_ADD_DEBUG_FLAGS" ]; then
|
||||
is_valid="true"
|
||||
fi
|
||||
done
|
||||
|
||||
# Exit with error if we are given an incorrect value
|
||||
if [ "$is_valid" == "false" ]; then
|
||||
die "SPACK_ADD_DEBUG_FLAGS, if defined, must be one of 'true' 'false' or 'custom'"
|
||||
fi
|
||||
|
||||
# Figure out the type of compiler, the language, and the mode so that
|
||||
# the compiler script knows what to do.
|
||||
#
|
||||
@@ -106,32 +133,37 @@ comp="CC"
|
||||
case "$command" in
|
||||
cpp)
|
||||
mode=cpp
|
||||
debug_flags="-g"
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
debug_flags="-g"
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
;;
|
||||
f77|xlf|xlf_r|pgf77)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
;;
|
||||
ld)
|
||||
ld|ld.gold|ld.lld)
|
||||
mode=ld
|
||||
;;
|
||||
*)
|
||||
@@ -215,7 +247,7 @@ export PATH=""
|
||||
for dir in "${env_path[@]}"; do
|
||||
addpath=true
|
||||
for env_dir in "${spack_env_dirs[@]}"; do
|
||||
if [[ "$dir" == "$env_dir" ]]; then
|
||||
if [[ "${dir%%/}" == "$env_dir" ]]; then
|
||||
addpath=false
|
||||
break
|
||||
fi
|
||||
@@ -415,6 +447,16 @@ done
|
||||
#
|
||||
flags=()
|
||||
|
||||
# Add debug flags
|
||||
if [ "${SPACK_ADD_DEBUG_FLAGS}" == "true" ]; then
|
||||
flags=("${flags[@]}" "${debug_flags}")
|
||||
|
||||
# If a custom flag is requested, derive from environment
|
||||
elif [ "$SPACK_ADD_DEBUG_FLAGS" == "custom" ]; then
|
||||
IFS=' ' read -ra SPACK_DEBUG_FLAGS <<< "$SPACK_DEBUG_FLAGS"
|
||||
flags=("${flags[@]}" "${SPACK_DEBUG_FLAGS[@]}")
|
||||
fi
|
||||
|
||||
# Fortran flags come before CPPFLAGS
|
||||
case "$mode" in
|
||||
cc|ccld)
|
||||
@@ -574,6 +616,9 @@ if [[ $SPACK_TEST_COMMAND == dump-args ]]; then
|
||||
IFS="
|
||||
" && echo "${full_command[*]}"
|
||||
exit
|
||||
elif [[ $SPACK_TEST_COMMAND =~ dump-env-* ]]; then
|
||||
var=${SPACK_TEST_COMMAND#dump-env-}
|
||||
echo "$0: $var: ${!var}"
|
||||
elif [[ -n $SPACK_TEST_COMMAND ]]; then
|
||||
die "ERROR: Unknown test command"
|
||||
fi
|
||||
|
1
lib/spack/env/ld.gold
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
cc
|
1
lib/spack/env/ld.lld
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
cc
|
2
lib/spack/external/__init__.py
vendored
@@ -11,7 +11,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.1.2 (commit 26dec9d47e509daf8c970de4c89da200da52ad20)
|
||||
* Version: 0.1.2 (commit 4dbf253daf37e4a008e4beb6489f347b4a35aed4)
|
||||
|
||||
argparse
|
||||
--------
|
||||
|
@@ -1942,6 +1942,12 @@
|
||||
"versions": "5:",
|
||||
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
{
|
||||
"versions": "20:",
|
||||
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
|
@@ -326,7 +326,7 @@ def end_function(self, prog=None):
|
||||
"""Returns the syntax needed to end a function definition.
|
||||
|
||||
Parameters:
|
||||
prog (str, optional): the command name
|
||||
prog (str or None): the command name
|
||||
|
||||
Returns:
|
||||
str: the function definition ending
|
||||
|
@@ -444,7 +444,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
src (str): the directory to copy
|
||||
dest (str): the destination directory
|
||||
symlinks (bool): whether or not to preserve symlinks
|
||||
ignore (function): function indicating which files to ignore
|
||||
ignore (typing.Callable): function indicating which files to ignore
|
||||
_permissions (bool): for internal use only
|
||||
|
||||
Raises:
|
||||
@@ -518,7 +518,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
|
||||
src (str): the directory to install
|
||||
dest (str): the destination directory
|
||||
symlinks (bool): whether or not to preserve symlinks
|
||||
ignore (function): function indicating which files to ignore
|
||||
ignore (typing.Callable): function indicating which files to ignore
|
||||
|
||||
Raises:
|
||||
IOError: if *src* does not match any files or directories
|
||||
@@ -557,12 +557,12 @@ def mkdirp(*paths, **kwargs):
|
||||
paths (str): paths to create with mkdirp
|
||||
|
||||
Keyword Arguments:
|
||||
mode (permission bits or None, optional): optional permissions to set
|
||||
mode (permission bits or None): optional permissions to set
|
||||
on the created directory -- use OS default if not provided
|
||||
group (group name or None, optional): optional group for permissions of
|
||||
group (group name or None): optional group for permissions of
|
||||
final created directory -- use OS default if not provided. Only
|
||||
used if world write permissions are not set
|
||||
default_perms ('parents' or 'args', optional): The default permissions
|
||||
default_perms (str or None): one of 'parents' or 'args'. The default permissions
|
||||
that are set for directories that are not themselves an argument
|
||||
for mkdirp. 'parents' means intermediate directories get the
|
||||
permissions of their direct parent directory, 'args' means
|
||||
@@ -692,7 +692,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
|
||||
try:
|
||||
yield tmp_dir
|
||||
except (Exception, KeyboardInterrupt, SystemExit) as e:
|
||||
except (Exception, KeyboardInterrupt, SystemExit):
|
||||
# Delete what was there, before copying back the original content
|
||||
if os.path.exists(directory_name):
|
||||
shutil.rmtree(directory_name)
|
||||
@@ -701,10 +701,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
dst=os.path.dirname(directory_name)
|
||||
)
|
||||
tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))
|
||||
|
||||
msg = 'the transactional move of "{0}" failed.'
|
||||
msg += '\n ' + str(e)
|
||||
raise RuntimeError(msg.format(directory_name))
|
||||
raise
|
||||
else:
|
||||
# Otherwise delete the temporary directory
|
||||
shutil.rmtree(tmp_dir)
|
||||
@@ -866,7 +863,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
Keyword Arguments:
|
||||
order (str): Whether to do pre- or post-order traversal. Accepted
|
||||
values are 'pre' and 'post'
|
||||
ignore (function): function indicating which files to ignore
|
||||
ignore (typing.Callable): function indicating which files to ignore
|
||||
follow_nonexisting (bool): Whether to descend into directories in
|
||||
``src`` that do not exist in ``dest``. Default is True
|
||||
follow_links (bool): Whether to descend into symlinks in ``src``
|
||||
@@ -1102,23 +1099,23 @@ def find(root, files, recursive=True):
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
|
||||
======= ====================================
|
||||
Pattern Meaning
|
||||
======= ====================================
|
||||
* matches everything
|
||||
? matches any single character
|
||||
[seq] matches any character in ``seq``
|
||||
[!seq] matches any character not in ``seq``
|
||||
======= ====================================
|
||||
========== ====================================
|
||||
Pattern Meaning
|
||||
========== ====================================
|
||||
``*`` matches everything
|
||||
``?`` matches any single character
|
||||
``[seq]`` matches any character in ``seq``
|
||||
``[!seq]`` matches any character not in ``seq``
|
||||
========== ====================================
|
||||
|
||||
Parameters:
|
||||
root (str): The root directory to start searching from
|
||||
files (str or Sequence): Library name(s) to search for
|
||||
recurse (bool, optional): if False search only root folder,
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to True.
|
||||
|
||||
Returns:
|
||||
list of strings: The files that have been found
|
||||
list: The files that have been found
|
||||
"""
|
||||
if isinstance(files, six.string_types):
|
||||
files = [files]
|
||||
@@ -1200,7 +1197,7 @@ def directories(self):
|
||||
['/dir1', '/dir2']
|
||||
|
||||
Returns:
|
||||
list of strings: A list of directories
|
||||
list: A list of directories
|
||||
"""
|
||||
return list(dedupe(
|
||||
os.path.dirname(x) for x in self.files if os.path.dirname(x)
|
||||
@@ -1218,7 +1215,7 @@ def basenames(self):
|
||||
['a.h', 'b.h']
|
||||
|
||||
Returns:
|
||||
list of strings: A list of base-names
|
||||
list: A list of base-names
|
||||
"""
|
||||
return list(dedupe(os.path.basename(x) for x in self.files))
|
||||
|
||||
@@ -1305,7 +1302,7 @@ def headers(self):
|
||||
"""Stable de-duplication of the headers.
|
||||
|
||||
Returns:
|
||||
list of strings: A list of header files
|
||||
list: A list of header files
|
||||
"""
|
||||
return self.files
|
||||
|
||||
@@ -1318,7 +1315,7 @@ def names(self):
|
||||
['a', 'b']
|
||||
|
||||
Returns:
|
||||
list of strings: A list of files without extensions
|
||||
list: A list of files without extensions
|
||||
"""
|
||||
names = []
|
||||
|
||||
@@ -1409,9 +1406,9 @@ def find_headers(headers, root, recursive=False):
|
||||
======= ====================================
|
||||
|
||||
Parameters:
|
||||
headers (str or list of str): Header name(s) to search for
|
||||
headers (str or list): Header name(s) to search for
|
||||
root (str): The root directory to start searching from
|
||||
recursive (bool, optional): if False search only root folder,
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to False.
|
||||
|
||||
Returns:
|
||||
@@ -1447,7 +1444,7 @@ def find_all_headers(root):
|
||||
in the directory passed as argument.
|
||||
|
||||
Args:
|
||||
root (path): directory where to look recursively for header files
|
||||
root (str): directory where to look recursively for header files
|
||||
|
||||
Returns:
|
||||
List of all headers found in ``root`` and subdirectories.
|
||||
@@ -1467,7 +1464,7 @@ def libraries(self):
|
||||
"""Stable de-duplication of library files.
|
||||
|
||||
Returns:
|
||||
list of strings: A list of library files
|
||||
list: A list of library files
|
||||
"""
|
||||
return self.files
|
||||
|
||||
@@ -1480,7 +1477,7 @@ def names(self):
|
||||
['a', 'b']
|
||||
|
||||
Returns:
|
||||
list of strings: A list of library names
|
||||
list: A list of library names
|
||||
"""
|
||||
names = []
|
||||
|
||||
@@ -1565,8 +1562,8 @@ def find_system_libraries(libraries, shared=True):
|
||||
======= ====================================
|
||||
|
||||
Parameters:
|
||||
libraries (str or list of str): Library name(s) to search for
|
||||
shared (bool, optional): if True searches for shared libraries,
|
||||
libraries (str or list): Library name(s) to search for
|
||||
shared (bool): if True searches for shared libraries,
|
||||
otherwise for static. Defaults to True.
|
||||
|
||||
Returns:
|
||||
@@ -1616,11 +1613,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
======= ====================================
|
||||
|
||||
Parameters:
|
||||
libraries (str or list of str): Library name(s) to search for
|
||||
libraries (str or list): Library name(s) to search for
|
||||
root (str): The root directory to start searching from
|
||||
shared (bool, optional): if True searches for shared libraries,
|
||||
shared (bool): if True searches for shared libraries,
|
||||
otherwise for static. Defaults to True.
|
||||
recursive (bool, optional): if False search only root folder,
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to False.
|
||||
|
||||
Returns:
|
||||
|
@@ -7,7 +7,6 @@
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import multiprocessing
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@@ -31,23 +30,6 @@
|
||||
ignore_modules = [r'^\.#', '~$']
|
||||
|
||||
|
||||
# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
|
||||
# method. Spack cannot currently handle this, so force the process to start
|
||||
# using the 'fork' start method.
|
||||
#
|
||||
# TODO: This solution is not ideal, as the 'fork' start method can lead to
|
||||
# crashes of the subprocess. Figure out how to make 'spawn' work.
|
||||
#
|
||||
# See:
|
||||
# * https://github.com/spack/spack/pull/18124
|
||||
# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods # noqa: E501
|
||||
# * https://bugs.python.org/issue33725
|
||||
if sys.version_info >= (3,): # novm
|
||||
fork_context = multiprocessing.get_context('fork')
|
||||
else:
|
||||
fork_context = multiprocessing
|
||||
|
||||
|
||||
def index_by(objects, *funcs):
|
||||
"""Create a hierarchy of dictionaries by splitting the supplied
|
||||
set of objects on unique values of the supplied functions.
|
||||
@@ -258,6 +240,47 @@ def new_dec(*args, **kwargs):
|
||||
return new_dec
|
||||
|
||||
|
||||
def key_ordering(cls):
|
||||
"""Decorates a class with extra methods that implement rich comparison
|
||||
operations and ``__hash__``. The decorator assumes that the class
|
||||
implements a function called ``_cmp_key()``. The rich comparison
|
||||
operations will compare objects using this key, and the ``__hash__``
|
||||
function will return the hash of this key.
|
||||
|
||||
If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
|
||||
``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.
|
||||
|
||||
Raises:
|
||||
TypeError: If the class does not have a ``_cmp_key`` method
|
||||
"""
|
||||
def setter(name, value):
|
||||
value.__name__ = name
|
||||
setattr(cls, name, value)
|
||||
|
||||
if not has_method(cls, '_cmp_key'):
|
||||
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
|
||||
|
||||
setter('__eq__',
|
||||
lambda s, o:
|
||||
(s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
|
||||
setter('__lt__',
|
||||
lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
|
||||
setter('__le__',
|
||||
lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
|
||||
|
||||
setter('__ne__',
|
||||
lambda s, o:
|
||||
(s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
|
||||
setter('__gt__',
|
||||
lambda s, o: o is None or s._cmp_key() > o._cmp_key())
|
||||
setter('__ge__',
|
||||
lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
|
||||
|
||||
setter('__hash__', lambda self: hash(self._cmp_key()))
|
||||
|
||||
return cls
|
||||
|
||||
|
||||
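# Illustrative usage (not part of this change): a class decorated with
# ``key_ordering`` only needs to provide ``_cmp_key()``::
#
#     @key_ordering
#     class Interval(object):
#         def __init__(self, lo, hi):
#             self.lo, self.hi = lo, hi
#
#         def _cmp_key(self):
#             return (self.lo, self.hi)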
#: sentinel for testing that iterators are done in lazy_lexicographic_ordering
|
||||
done = object()
|
||||
|
||||
@@ -573,8 +596,8 @@ def pretty_date(time, now=None):
|
||||
"""Convert a datetime or timestamp to a pretty, relative date.
|
||||
|
||||
Args:
|
||||
time (datetime or int): date to print prettily
|
||||
now (datetime): dateimte for 'now', i.e. the date the pretty date
|
||||
time (datetime.datetime or int): date to print prettily
|
||||
now (datetime.datetime): datetime for 'now', i.e. the date the pretty date
|
||||
is relative to (default is datetime.now())
|
||||
|
||||
Returns:
|
||||
@@ -648,7 +671,7 @@ def pretty_string_to_date(date_str, now=None):
|
||||
or be a *pretty date* (like ``yesterday`` or ``two months ago``)
|
||||
|
||||
Returns:
|
||||
(datetime): datetime object corresponding to ``date_str``
|
||||
(datetime.datetime): datetime object corresponding to ``date_str``
|
||||
"""
|
||||
|
||||
pattern = {}
|
||||
@@ -892,3 +915,19 @@ class Devnull(object):
|
||||
"""
|
||||
def write(self, *_):
|
||||
pass
|
||||
|
||||
|
||||
def elide_list(line_list, max_num=10):
|
||||
"""Takes a long list and limits it to a smaller number of elements,
|
||||
replacing intervening elements with '...'. For example::
|
||||
|
||||
elide_list([1,2,3,4,5,6], 4)
|
||||
|
||||
gives::
|
||||
|
||||
[1, 2, 3, '...', 6]
|
||||
"""
|
||||
if len(line_list) > max_num:
|
||||
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
|
||||
else:
|
||||
return line_list
|
||||
|
@@ -9,14 +9,25 @@
|
||||
import socket
|
||||
import time
|
||||
from datetime import datetime
|
||||
from typing import Dict, Tuple # novm
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.util.string
|
||||
|
||||
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
|
||||
'LockError', 'LockTimeoutError',
|
||||
'LockPermissionError', 'LockROFileError', 'CantCreateLockError']
|
||||
__all__ = [
|
||||
'Lock',
|
||||
'LockDowngradeError',
|
||||
'LockUpgradeError',
|
||||
'LockTransaction',
|
||||
'WriteTransaction',
|
||||
'ReadTransaction',
|
||||
'LockError',
|
||||
'LockTimeoutError',
|
||||
'LockPermissionError',
|
||||
'LockROFileError',
|
||||
'CantCreateLockError'
|
||||
]
|
||||
|
||||
#: Mapping of supported locks to description
|
||||
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
|
||||
@@ -26,6 +37,126 @@
|
||||
true_fn = lambda: True
|
||||
|
||||
|
||||
class OpenFile(object):
|
||||
"""Record for keeping track of open lockfiles (with reference counting).
|
||||
|
||||
There's really only one ``OpenFile`` per inode, per process, but we record the
|
||||
filehandle here as it's the thing we end up using in python code. You can get
|
||||
the file descriptor from the file handle if needed -- or we could make this track
|
||||
file descriptors as well in the future.
|
||||
"""
|
||||
def __init__(self, fh):
|
||||
self.fh = fh
|
||||
self.refs = 0
|
||||
|
||||
|
||||
class OpenFileTracker(object):
|
||||
"""Track open lockfiles, to minimize number of open file descriptors.
|
||||
|
||||
The ``fcntl`` locks that Spack uses are associated with an inode and a process.
|
||||
This is convenient, because if a process exits, it releases its locks.
|
||||
Unfortunately, this also means that if you close a file, *all* locks associated
|
||||
with that file's inode are released, regardless of whether the process has any
|
||||
other open file descriptors on it.
|
||||
|
||||
Because of this, we need to track open lock files so that we only close them when
|
||||
a process no longer needs them. We do this by tracking each lockfile by its
|
||||
inode and process id. This has several nice properties:
|
||||
|
||||
1. Tracking by pid ensures that, if we fork, we don't inadvertently track the parent
|
||||
process's lockfiles. ``fcntl`` locks are not inherited across forks, so we'll
|
||||
just track new lockfiles in the child.
|
||||
2. Tracking by inode ensures that references are counted per inode, and that we don't
|
||||
inadvertently close a file whose inode still has open locks.
|
||||
3. Tracking by both pid and inode ensures that we only open lockfiles the minimum
|
||||
number of times necessary for the locks we have.
|
||||
|
||||
Note: as mentioned elsewhere, these locks aren't thread safe -- they're designed to
|
||||
work in Python and assume the GIL.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Create a new ``OpenFileTracker``."""
|
||||
self._descriptors = {} # type: Dict[Tuple[int, int], OpenFile]
|
||||
|
||||
def get_fh(self, path):
|
||||
"""Get a filehandle for a lockfile.
|
||||
|
||||
This routine will open writable files for read/write even if you're asking
|
||||
for a shared (read-only) lock. This is so that we can upgrade to an exclusive
|
||||
(write) lock later if requested.
|
||||
|
||||
Arguments:
|
||||
path (str): path to lock file we want a filehandle for
|
||||
"""
|
||||
# Open writable files as 'r+' so we can upgrade to write later
|
||||
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), 'r+'
|
||||
|
||||
pid = os.getpid()
|
||||
open_file = None # OpenFile object, if there is one
|
||||
stat = None # stat result for the lockfile, if it exists
|
||||
|
||||
try:
|
||||
# see whether we've seen this inode/pid before
|
||||
stat = os.stat(path)
|
||||
key = (stat.st_ino, pid)
|
||||
open_file = self._descriptors.get(key)
|
||||
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT: # only handle file not found
|
||||
raise
|
||||
|
||||
# path does not exist -- fail if we won't be able to create it
|
||||
parent = os.path.dirname(path) or '.'
|
||||
if not os.access(parent, os.W_OK):
|
||||
raise CantCreateLockError(path)
|
||||
|
||||
# if there was no already open file, we'll need to open one
|
||||
if not open_file:
|
||||
if stat and not os.access(path, os.W_OK):
|
||||
# we know path exists but not if it's writable. If it's read-only,
|
||||
# only open the file for reading (and fail if we're trying to get
|
||||
# an exclusive (write) lock on it)
|
||||
os_mode, fh_mode = os.O_RDONLY, 'r'
|
||||
|
||||
fd = os.open(path, os_mode)
|
||||
fh = os.fdopen(fd, fh_mode)
|
||||
open_file = OpenFile(fh)
|
||||
|
||||
# if we just created the file, we'll need to get its inode here
|
||||
if not stat:
|
||||
inode = os.fstat(fd).st_ino
|
||||
key = (inode, pid)
|
||||
|
||||
self._descriptors[key] = open_file
|
||||
|
||||
open_file.refs += 1
|
||||
return open_file.fh
|
||||
|
||||
def release_fh(self, path):
|
||||
"""Release a filehandle, only closing it if there are no more references."""
|
||||
try:
|
||||
inode = os.stat(path).st_ino
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT: # only handle file not found
|
||||
raise
|
||||
inode = None # this will not be in self._descriptors
|
||||
|
||||
key = (inode, os.getpid())
|
||||
open_file = self._descriptors.get(key)
|
||||
assert open_file, "Attempted to close non-existing lock path: %s" % path
|
||||
|
||||
open_file.refs -= 1
|
||||
if not open_file.refs:
|
||||
del self._descriptors[key]
|
||||
open_file.fh.close()
|
||||
|
||||
|
||||
#: Open file descriptors for locks in this process. Used to prevent one process
|
||||
#: from opening the same file many times for different byte range locks
|
||||
file_tracker = OpenFileTracker()
|
||||
|
||||
|
||||
def _attempts_str(wait_time, nattempts):
|
||||
# Don't print anything if we succeeded on the first try
|
||||
if nattempts <= 1:
|
||||
@@ -46,7 +177,8 @@ class Lock(object):
|
||||
Note that this is for managing contention over resources *between*
|
||||
processes and not for managing contention between threads in a process: the
|
||||
functions of this object are not thread-safe. A process also must not
|
||||
maintain multiple locks on the same file.
|
||||
maintain multiple locks on the same file (or, more specifically, on
|
||||
overlapping byte ranges in the same file).
|
||||
"""
|
||||
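    # Illustrative usage sketch (not part of this change); ``release_write``
    # appears below in this module, and the matching acquire call is assumed:
    #
    #     lock = Lock('/path/to/resource.lock', default_timeout=60)
    #     lock.acquire_write()
    #     try:
    #         pass  # modify the resource guarded by the lock
    #     finally:
    #         lock.release_write()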
|
||||
def __init__(self, path, start=0, length=0, default_timeout=None,
|
||||
@@ -151,25 +283,10 @@ def _lock(self, op, timeout=None):
|
||||
|
||||
# Create file and parent directories if they don't exist.
|
||||
if self._file is None:
|
||||
parent = self._ensure_parent_directory()
|
||||
self._ensure_parent_directory()
|
||||
self._file = file_tracker.get_fh(self.path)
|
||||
|
||||
# Open writable files as 'r+' so we can upgrade to write later
|
||||
os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), 'r+'
|
||||
if os.path.exists(self.path):
|
||||
if not os.access(self.path, os.W_OK):
|
||||
if op == fcntl.LOCK_SH:
|
||||
# can still lock read-only files if we open 'r'
|
||||
os_mode, fd_mode = os.O_RDONLY, 'r'
|
||||
else:
|
||||
raise LockROFileError(self.path)
|
||||
|
||||
elif not os.access(parent, os.W_OK):
|
||||
raise CantCreateLockError(self.path)
|
||||
|
||||
fd = os.open(self.path, os_mode)
|
||||
self._file = os.fdopen(fd, fd_mode)
|
||||
|
||||
elif op == fcntl.LOCK_EX and self._file.mode == 'r':
|
||||
if op == fcntl.LOCK_EX and self._file.mode == 'r':
|
||||
# Attempt to upgrade to write lock w/a read-only file.
|
||||
# If the file were writable, we'd have opened it 'r+'
|
||||
raise LockROFileError(self.path)
|
||||
@@ -282,7 +399,8 @@ def _unlock(self):
|
||||
"""
|
||||
fcntl.lockf(self._file, fcntl.LOCK_UN,
|
||||
self._length, self._start, os.SEEK_SET)
|
||||
self._file.close()
|
||||
|
||||
file_tracker.release_fh(self.path)
|
||||
self._file = None
|
||||
self._reads = 0
|
||||
self._writes = 0
|
||||
@@ -401,7 +519,7 @@ def release_read(self, release_fn=None):
|
||||
"""Releases a read lock.
|
||||
|
||||
Arguments:
|
||||
release_fn (callable): function to call *before* the last recursive
|
||||
release_fn (typing.Callable): function to call *before* the last recursive
|
||||
lock (read or write) is released.
|
||||
|
||||
If the last recursive lock will be released, then this will call
|
||||
@@ -437,7 +555,7 @@ def release_write(self, release_fn=None):
|
||||
"""Releases a write lock.
|
||||
|
||||
Arguments:
|
||||
release_fn (callable): function to call before the last recursive
|
||||
release_fn (typing.Callable): function to call before the last recursive
|
||||
write is released.
|
||||
|
||||
If the last recursive *write* lock will be released, then this
|
||||
@@ -533,10 +651,10 @@ class LockTransaction(object):
|
||||
Arguments:
|
||||
lock (Lock): underlying lock for this transaction to be acquired on
|
||||
enter and released on exit
|
||||
acquire (callable or contextmanager): function to be called after lock
|
||||
is acquired, or contextmanager to enter after acquire and leave
|
||||
acquire (typing.Callable or contextlib.contextmanager): function to be called
|
||||
after lock is acquired, or contextmanager to enter after acquire and leave
|
||||
before release.
|
||||
release (callable): function to be called before release. If
|
||||
release (typing.Callable): function to be called before release. If
|
||||
``acquire`` is a contextmanager, this will be called *after*
|
||||
exiting the nested context and before the lock is released.
|
||||
timeout (float): number of seconds to set for the timeout when
|
||||
|
@@ -5,6 +5,7 @@
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import fcntl
|
||||
import os
|
||||
import struct
|
||||
@@ -28,6 +29,7 @@
|
||||
_msg_enabled = True
|
||||
_warn_enabled = True
|
||||
_error_enabled = True
|
||||
_output_filter = lambda s: s
|
||||
indent = " "
|
||||
|
||||
|
||||
@@ -90,6 +92,18 @@ def error_enabled():
|
||||
return _error_enabled
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def output_filter(filter_fn):
|
||||
"""Context manager that applies a filter to all output."""
|
||||
global _output_filter
|
||||
saved_filter = _output_filter
|
||||
try:
|
||||
_output_filter = filter_fn
|
||||
yield
|
||||
finally:
|
||||
_output_filter = saved_filter
|
||||
|
||||
|
||||
class SuppressOutput:
|
||||
"""Class for disabling output in a scope using 'with' keyword"""
|
||||
|
||||
@@ -166,13 +180,23 @@ def msg(message, *args, **kwargs):
|
||||
if _stacktrace:
|
||||
st_text = process_stacktrace(2)
|
||||
if newline:
|
||||
cprint("@*b{%s==>} %s%s" % (
|
||||
st_text, get_timestamp(), cescape(message)))
|
||||
cprint(
|
||||
"@*b{%s==>} %s%s" % (
|
||||
st_text,
|
||||
get_timestamp(),
|
||||
cescape(_output_filter(message))
|
||||
)
|
||||
)
|
||||
else:
|
||||
cwrite("@*b{%s==>} %s%s" % (
|
||||
st_text, get_timestamp(), cescape(message)))
|
||||
cwrite(
|
||||
"@*b{%s==>} %s%s" % (
|
||||
st_text,
|
||||
get_timestamp(),
|
||||
cescape(_output_filter(message))
|
||||
)
|
||||
)
|
||||
for arg in args:
|
||||
print(indent + six.text_type(arg))
|
||||
print(indent + _output_filter(six.text_type(arg)))
|
||||
|
||||
|
||||
def info(message, *args, **kwargs):
|
||||
@@ -188,18 +212,29 @@ def info(message, *args, **kwargs):
|
||||
st_text = ""
|
||||
if _stacktrace:
|
||||
st_text = process_stacktrace(st_countback)
|
||||
cprint("@%s{%s==>} %s%s" % (
|
||||
format, st_text, get_timestamp(), cescape(six.text_type(message))
|
||||
), stream=stream)
|
||||
cprint(
|
||||
"@%s{%s==>} %s%s" % (
|
||||
format,
|
||||
st_text,
|
||||
get_timestamp(),
|
||||
cescape(_output_filter(six.text_type(message)))
|
||||
),
|
||||
stream=stream
|
||||
)
|
||||
for arg in args:
|
||||
if wrap:
|
||||
lines = textwrap.wrap(
|
||||
six.text_type(arg), initial_indent=indent,
|
||||
subsequent_indent=indent, break_long_words=break_long_words)
|
||||
_output_filter(six.text_type(arg)),
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent,
|
||||
break_long_words=break_long_words
|
||||
)
|
||||
for line in lines:
|
||||
stream.write(line + '\n')
|
||||
else:
|
||||
stream.write(indent + six.text_type(arg) + '\n')
|
||||
stream.write(
|
||||
indent + _output_filter(six.text_type(arg)) + '\n'
|
||||
)
|
||||
|
||||
|
||||
def verbose(message, *args, **kwargs):
|
||||
|
@@ -109,19 +109,17 @@ def colify(elts, **options):
|
||||
using ``str()``.
|
||||
|
||||
Keyword Arguments:
|
||||
output (stream): A file object to write to. Default is ``sys.stdout``
|
||||
indent (int): Optionally indent all columns by some number of spaces
|
||||
padding (int): Spaces between columns. Default is 2
|
||||
width (int): Width of the output. Default is 80 if tty not detected
|
||||
cols (int): Force number of columns. Default is to size to
|
||||
terminal, or single-column if no tty
|
||||
tty (bool): Whether to attempt to write to a tty. Default is to
|
||||
autodetect a tty. Set to False to force single-column
|
||||
output
|
||||
method (str): Method to use to fit columns. Options are variable or
|
||||
uniform. Variable-width columns are tighter, uniform
|
||||
columns are all the same width and fit less data on
|
||||
the screen
|
||||
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
||||
indent (int): Optionally indent all columns by some number of spaces
|
||||
padding (int): Spaces between columns. Default is 2
|
||||
width (int): Width of the output. Default is 80 if tty not detected
|
||||
cols (int): Force number of columns. Default is to size to terminal, or
|
||||
single-column if no tty
|
||||
tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
|
||||
tty. Set to False to force single-column output
|
||||
method (str): Method to use to fit columns. Options are variable or uniform.
|
||||
Variable-width columns are tighter, uniform columns are all the same width
|
||||
and fit less data on the screen
|
||||
"""
|
||||
# Get keyword arguments or set defaults
|
||||
cols = options.pop("cols", 0)
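As a side note, a minimal usage sketch of colify with the keyword arguments documented above (the element list is made up):

    from llnl.util.tty.colify import colify

    # Auto-size columns to the terminal, indent two spaces, two spaces between columns
    colify(['zlib', 'openssl', 'cmake', 'python'], indent=2, padding=2)

    # Force single-column output even when a tty is detected
    colify(['zlib', 'openssl', 'cmake', 'python'], tty=False)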
|
||||
|
@@ -436,7 +436,7 @@ class log_output(object):
|
||||
"""
|
||||
|
||||
def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
|
||||
env=None):
|
||||
env=None, filter_fn=None):
|
||||
"""Create a new output log context manager.
|
||||
|
||||
Args:
|
||||
@@ -446,6 +446,8 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
|
||||
debug (int): positive to enable tty debug mode during logging
|
||||
buffer (bool): pass buffer=True to skip unbuffering output; note
|
||||
this doesn't set up any *new* buffering
|
||||
filter_fn (callable, optional): Callable[str] -> str to filter each
|
||||
line of output
|
||||
|
||||
log_output can take either a file object or a filename. If a
|
||||
filename is passed, the file will be opened and closed entirely
|
||||
@@ -465,6 +467,7 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
|
||||
self.debug = debug
|
||||
self.buffer = buffer
|
||||
self.env = env # the environment to use for _writer_daemon
|
||||
self.filter_fn = filter_fn
|
||||
|
||||
self._active = False # used to prevent re-entry
|
||||
|
||||
@@ -530,20 +533,22 @@ def __enter__(self):
|
||||
# Sets a daemon that writes to file what it reads from a pipe
|
||||
try:
|
||||
# need to pass this b/c multiprocessing closes stdin in child.
|
||||
input_multiprocess_fd = None
|
||||
try:
|
||||
input_multiprocess_fd = MultiProcessFd(
|
||||
os.dup(sys.stdin.fileno())
|
||||
)
|
||||
if sys.stdin.isatty():
|
||||
input_multiprocess_fd = MultiProcessFd(
|
||||
os.dup(sys.stdin.fileno())
|
||||
)
|
||||
except BaseException:
|
||||
# just don't forward input if this fails
|
||||
input_multiprocess_fd = None
|
||||
pass
|
||||
|
||||
with replace_environment(self.env):
|
||||
self.process = multiprocessing.Process(
|
||||
target=_writer_daemon,
|
||||
args=(
|
||||
input_multiprocess_fd, read_multiprocess_fd, write_fd,
|
||||
self.echo, self.log_file, child_pipe
|
||||
self.echo, self.log_file, child_pipe, self.filter_fn
|
||||
)
|
||||
)
|
||||
self.process.daemon = True # must set before start()
|
||||
@@ -667,7 +672,7 @@ def force_echo(self):
|
||||
|
||||
|
||||
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
log_file_wrapper, control_pipe):
|
||||
log_file_wrapper, control_pipe, filter_fn):
|
||||
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
|
||||
|
||||
The daemon receives output from the parent process and writes it both
|
||||
@@ -712,6 +717,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
log_file_wrapper (FileWrapper): file to log all output
|
||||
control_pipe (Pipe): multiprocessing pipe on which to send control
|
||||
information to the parent
|
||||
filter_fn (callable, optional): function to filter each line of output
|
||||
|
||||
"""
|
||||
# If this process was forked, then it will inherit file descriptors from
|
||||
@@ -784,7 +790,10 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
|
||||
# Echo to stdout if requested or forced.
|
||||
if echo or force_echo:
|
||||
sys.stdout.write(clean_line)
|
||||
output_line = clean_line
|
||||
if filter_fn:
|
||||
output_line = filter_fn(clean_line)
|
||||
sys.stdout.write(output_line)
|
||||
|
||||
# Stripped output to log file.
|
||||
log_file.write(_strip(clean_line))
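Taken together, the new filter_fn hook lets callers scrub each line before it is echoed. A minimal sketch, assuming a made-up redaction function and log path:

    from llnl.util.tty.log import log_output

    def redact(line):
        # hypothetical filter: mask a token before the line is echoed to the terminal
        return line.replace('SECRET_TOKEN', '***')

    with log_output('/tmp/build.log', echo=True, filter_fn=redact):
        print('SECRET_TOKEN should appear masked on the terminal')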
|
||||
|
@@ -3,9 +3,8 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
#: major, minor, patch version for Spack, in a tuple
|
||||
spack_version_info = (0, 16, 1)
|
||||
spack_version_info = (0, 16, 2)
|
||||
|
||||
#: String containing Spack version joined with .'s
|
||||
spack_version = '.'.join(str(v) for v in spack_version_info)
|
||||
|
@@ -2,28 +2,24 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""
|
||||
This module contains all the elements that are required to create an
|
||||
architecture object. These include the target processor, the operating system,
|
||||
and the architecture platform (i.e. cray, darwin, linux, etc) classes.
|
||||
"""Aggregate the target processor, the operating system and the target
|
||||
platform into an architecture object.
|
||||
|
||||
On a multiple architecture machine, the architecture spec field can be set to
|
||||
build a package against any target and operating system that is present on the
|
||||
platform. On Cray platforms or any other architecture that has different front
|
||||
and back end environments, the operating system will determine the method of
|
||||
compiler
|
||||
detection.
|
||||
compiler detection.
|
||||
|
||||
There are two different types of compiler detection:
|
||||
|
||||
1. Through the $PATH env variable (front-end detection)
|
||||
2. Through the tcl module system. (back-end detection)
|
||||
2. Through the module system. (back-end detection)
|
||||
|
||||
Depending on which operating system is specified, the compiler will be detected
|
||||
using one of those methods.
|
||||
|
||||
For platforms such as linux and darwin, the operating system is autodetected
|
||||
and the target is set to be x86_64.
|
||||
For platforms such as linux and darwin, the operating system is autodetected.
|
||||
|
||||
The command line syntax for specifying an architecture is as follows:
|
||||
|
||||
@@ -33,10 +29,8 @@
|
||||
the command line and Spack will concretize using the default. These defaults
|
||||
are set in the 'platforms/' directory which contains the different subclasses
|
||||
for platforms. If the machine has multiple architectures, the user can
|
||||
also enter front-end, or fe or back-end or be. These settings will concretize
|
||||
to their respective front-end and back-end targets and operating systems.
|
||||
Additional platforms can be added by creating a subclass of Platform
|
||||
and adding it inside the platform directory.
|
||||
also enter frontend, or fe or backend or be. These settings will concretize
|
||||
to their respective frontend and backend targets and operating systems.
|
||||
|
||||
Platforms are an abstract class that are extended by subclasses. If the user
|
||||
wants to add a new type of platform (such as cray_xe), they can create a
|
||||
@@ -47,335 +41,30 @@
|
||||
new platform is added and the user wants that to be detected first.
|
||||
|
||||
Targets are created inside the platform subclasses. Most architectures
|
||||
(like linux, and darwin) will have only one target (x86_64) but in the case of
|
||||
(like linux, and darwin) will have only one target family (x86_64) but in the case of
|
||||
Cray machines, there is both a frontend and backend processor. The user can
|
||||
specify which targets are present on front-end and back-end architecture
|
||||
|
||||
Depending on the platform, operating systems are either auto-detected or are
|
||||
set. The user can set the front-end and back-end operating setting by the class
|
||||
Depending on the platform, operating systems are either autodetected or are
|
||||
set. The user can set the frontend and backend operating setting by the class
|
||||
attributes front_os and back_os. The operating system as described earlier,
|
||||
will be responsible for compiler detection.
|
||||
"""
|
||||
import contextlib
|
||||
import functools
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.error as serr
|
||||
import spack.paths
|
||||
import spack.util.classes
|
||||
import spack.util.executable
|
||||
import spack.operating_systems
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.target
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
from spack.util.spack_yaml import syaml_dict
|
||||
|
||||
|
||||
class NoPlatformError(serr.SpackError):
|
||||
def __init__(self):
|
||||
super(NoPlatformError, self).__init__(
|
||||
"Could not determine a platform for this machine.")
|
||||
|
||||
|
||||
def _ensure_other_is_target(method):
|
||||
"""Decorator to be used in dunder methods taking a single argument to
|
||||
ensure that the argument is an instance of ``Target`` too.
|
||||
"""
|
||||
@functools.wraps(method)
|
||||
def _impl(self, other):
|
||||
if isinstance(other, six.string_types):
|
||||
other = Target(other)
|
||||
|
||||
if not isinstance(other, Target):
|
||||
return NotImplemented
|
||||
|
||||
return method(self, other)
|
||||
|
||||
return _impl
|
||||
|
||||
|
||||
class Target(object):
|
||||
def __init__(self, name, module_name=None):
|
||||
"""Target models microarchitectures and their compatibility.
|
||||
|
||||
Args:
|
||||
name (str or Microarchitecture): micro-architecture of the
|
||||
target
|
||||
module_name (str): optional module name to get access to the
|
||||
current target. This is typically used on machines
|
||||
like Cray (e.g. craype-compiler)
|
||||
"""
|
||||
if not isinstance(name, archspec.cpu.Microarchitecture):
|
||||
name = archspec.cpu.TARGETS.get(
|
||||
name, archspec.cpu.generic_microarchitecture(name)
|
||||
)
|
||||
self.microarchitecture = name
|
||||
self.module_name = module_name
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.microarchitecture.name
|
||||
|
||||
@_ensure_other_is_target
|
||||
def __eq__(self, other):
|
||||
return self.microarchitecture == other.microarchitecture and \
|
||||
self.module_name == other.module_name
|
||||
|
||||
def __ne__(self, other):
|
||||
# This method is necessary as long as we support Python 2. In Python 3
|
||||
# __ne__ defaults to the implementation below
|
||||
return not self == other
|
||||
|
||||
@_ensure_other_is_target
|
||||
def __lt__(self, other):
|
||||
# TODO: In the future it would be convenient to say
|
||||
# TODO: `spec.architecture.target < other.architecture.target`
|
||||
# TODO: and change the semantic of the comparison operators
|
||||
|
||||
# This is needed to sort deterministically specs in a list.
|
||||
# It doesn't implement a total ordering semantic.
|
||||
return self.microarchitecture.name < other.microarchitecture.name
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.name, self.module_name))
|
||||
|
||||
@staticmethod
|
||||
def from_dict_or_value(dict_or_value):
|
||||
# A string here represents a generic target (like x86_64 or ppc64) or
|
||||
# a custom micro-architecture
|
||||
if isinstance(dict_or_value, six.string_types):
|
||||
return Target(dict_or_value)
|
||||
|
||||
# TODO: From a dict we actually retrieve much more information than
|
||||
# TODO: just the name. We can use that information to reconstruct an
|
||||
# TODO: "old" micro-architecture or check the current definition.
|
||||
target_info = dict_or_value
|
||||
return Target(target_info['name'])
|
||||
|
||||
def to_dict_or_value(self):
|
||||
"""Returns a dict or a value representing the current target.
|
||||
|
||||
String values are used to keep backward compatibility with generic
|
||||
targets, like e.g. x86_64 or ppc64. More specific micro-architectures
|
||||
will return a dictionary which contains information on the name,
|
||||
features, vendor, generation and parents of the current target.
|
||||
"""
|
||||
# Generic targets represent either an architecture
|
||||
# family (like x86_64) or a custom micro-architecture
|
||||
if self.microarchitecture.vendor == 'generic':
|
||||
return str(self)
|
||||
|
||||
return syaml_dict(
|
||||
self.microarchitecture.to_dict(return_list_of_items=True)
|
||||
)
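Concretely, the two representations described in the docstring look roughly like this (field values are illustrative; the keys come from the description above):

    # generic target                      -> 'x86_64'
    # specific micro-architecture target  -> {'name': 'haswell', 'vendor': '...',
    #                                          'features': [...], 'generation': ...,
    #                                          'parents': [...]}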
|
||||
|
||||
def __repr__(self):
|
||||
cls_name = self.__class__.__name__
|
||||
fmt = cls_name + '({0}, {1})'
|
||||
return fmt.format(repr(self.microarchitecture),
|
||||
repr(self.module_name))
|
||||
|
||||
def __str__(self):
|
||||
return str(self.microarchitecture)
|
||||
|
||||
def __contains__(self, cpu_flag):
|
||||
return cpu_flag in self.microarchitecture
|
||||
|
||||
def optimization_flags(self, compiler):
|
||||
"""Returns the flags needed to optimize for this target using
|
||||
the compiler passed as argument.
|
||||
|
||||
Args:
|
||||
compiler (CompilerSpec or Compiler): object that contains both the
|
||||
name and the version of the compiler we want to use
|
||||
"""
|
||||
# Mixed toolchains are not supported yet
|
||||
import spack.compilers
|
||||
if isinstance(compiler, spack.compiler.Compiler):
|
||||
if spack.compilers.is_mixed_toolchain(compiler):
|
||||
msg = ('microarchitecture specific optimizations are not '
|
||||
'supported yet on mixed compiler toolchains [check'
|
||||
' {0.name}@{0.version} for further details]')
|
||||
warnings.warn(msg.format(compiler))
|
||||
return ''
|
||||
|
||||
# Try to check if the current compiler comes with a version number or
|
||||
# has an unexpected suffix. If so, treat it as a compiler with a
|
||||
# custom spec.
|
||||
compiler_version = compiler.version
|
||||
version_number, suffix = archspec.cpu.version_components(
|
||||
compiler.version
|
||||
)
|
||||
if not version_number or suffix not in ('', 'apple'):
|
||||
# Try to deduce the underlying version of the compiler, regardless
|
||||
# of its name in compilers.yaml. Depending on where this function
|
||||
# is called we might get either a CompilerSpec or a fully fledged
|
||||
# compiler object.
|
||||
import spack.spec
|
||||
if isinstance(compiler, spack.spec.CompilerSpec):
|
||||
compiler = spack.compilers.compilers_for_spec(compiler).pop()
|
||||
try:
|
||||
compiler_version = compiler.real_version
|
||||
except spack.util.executable.ProcessError as e:
|
||||
# log this and just return compiler.version instead
|
||||
tty.debug(str(e))
|
||||
|
||||
return self.microarchitecture.optimization_flags(
|
||||
compiler.name, str(compiler_version)
|
||||
)
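The heavy lifting above is ultimately delegated to archspec; a minimal sketch of the underlying call (target name and compiler version are made up):

    import archspec.cpu

    uarch = archspec.cpu.TARGETS['haswell']
    # Flags a given compiler needs to generate code tuned for this micro-architecture
    print(uarch.optimization_flags('gcc', '9.3.0'))  # e.g. '-march=haswell -mtune=haswell'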
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
class Platform(object):
|
||||
""" Abstract class that each type of Platform will subclass.
|
||||
Will return an instance of it once it is detected.
|
||||
"""
|
||||
|
||||
# Subclass sets number. Controls detection order
|
||||
priority = None # type: int
|
||||
|
||||
#: binary formats used on this platform; used by relocation logic
|
||||
binary_formats = ['elf']
|
||||
|
||||
front_end = None # type: str
|
||||
back_end = None # type: str
|
||||
default = None # type: str # The default back end target.
|
||||
|
||||
front_os = None # type: str
|
||||
back_os = None # type: str
|
||||
default_os = None # type: str
|
||||
|
||||
reserved_targets = ['default_target', 'frontend', 'fe', 'backend', 'be']
|
||||
reserved_oss = ['default_os', 'frontend', 'fe', 'backend', 'be']
|
||||
|
||||
def __init__(self, name):
|
||||
self.targets = {}
|
||||
self.operating_sys = {}
|
||||
self.name = name
|
||||
|
||||
def add_target(self, name, target):
|
||||
"""Used by the platform specific subclass to list available targets.
|
||||
Raises an error if the platform specifies a name
|
||||
that is reserved by spack as an alias.
|
||||
"""
|
||||
if name in Platform.reserved_targets:
|
||||
raise ValueError(
|
||||
"%s is a spack reserved alias "
|
||||
"and cannot be the name of a target" % name)
|
||||
self.targets[name] = target
|
||||
|
||||
def target(self, name):
|
||||
"""This is a getter method for the target dictionary
|
||||
that handles defaulting based on the values provided by default,
|
||||
front-end, and back-end. This can be overwritten
|
||||
by a subclass for which we want to provide further aliasing options.
|
||||
"""
|
||||
# TODO: Check if we can avoid using strings here
|
||||
name = str(name)
|
||||
if name == 'default_target':
|
||||
name = self.default
|
||||
elif name == 'frontend' or name == 'fe':
|
||||
name = self.front_end
|
||||
elif name == 'backend' or name == 'be':
|
||||
name = self.back_end
|
||||
|
||||
return self.targets.get(name, None)
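In other words, for any Platform subclass (illustrative):

    # platform.target('fe') and platform.target('frontend') both resolve to whatever
    # the subclass set as front_end; names that were never registered return None.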
|
||||
|
||||
def add_operating_system(self, name, os_class):
|
||||
""" Add the operating_system class object into the
|
||||
platform.operating_sys dictionary
|
||||
"""
|
||||
if name in Platform.reserved_oss:
|
||||
raise ValueError(
|
||||
"%s is a spack reserved alias "
|
||||
"and cannot be the name of an OS" % name)
|
||||
self.operating_sys[name] = os_class
|
||||
|
||||
def operating_system(self, name):
|
||||
if name == 'default_os':
|
||||
name = self.default_os
|
||||
if name == 'frontend' or name == "fe":
|
||||
name = self.front_os
|
||||
if name == 'backend' or name == 'be':
|
||||
name = self.back_os
|
||||
|
||||
return self.operating_sys.get(name, None)
|
||||
|
||||
@classmethod
|
||||
def setup_platform_environment(cls, pkg, env):
|
||||
""" Subclass can override this method if it requires any
|
||||
platform-specific build environment modifications.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def detect(cls):
|
||||
""" Subclass is responsible for implementing this method.
|
||||
Returns True if the Platform class detects that
|
||||
it is the current platform
|
||||
and False if it's not.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def _cmp_iter(self):
|
||||
yield self.name
|
||||
yield self.default
|
||||
yield self.front_end
|
||||
yield self.back_end
|
||||
yield self.default_os
|
||||
yield self.front_os
|
||||
yield self.back_os
|
||||
|
||||
def targets():
|
||||
for t in sorted(self.targets.values()):
|
||||
yield t._cmp_iter
|
||||
yield targets
|
||||
|
||||
def oses():
|
||||
for o in sorted(self.operating_sys.values()):
|
||||
yield o._cmp_iter
|
||||
yield oses
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
class OperatingSystem(object):
|
||||
""" Operating System will be like a class similar to platform extended
|
||||
by subclasses for the specifics. Operating System will contain the
|
||||
compiler finding logic. Instead of calling two separate methods to
|
||||
find compilers we call find_compilers method for each operating system
|
||||
"""
|
||||
|
||||
def __init__(self, name, version):
|
||||
self.name = name.replace('-', '_')
|
||||
self.version = str(version).replace('-', '_')
|
||||
|
||||
def __str__(self):
|
||||
return "%s%s" % (self.name, self.version)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def _cmp_iter(self):
|
||||
yield self.name
|
||||
yield self.version
|
||||
|
||||
def to_dict(self):
|
||||
return syaml_dict([
|
||||
('name', self.name),
|
||||
('version', self.version)
|
||||
])
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
@@ -400,11 +89,13 @@ def __init__(self, plat=None, os=None, target=None):
|
||||
|
||||
@property
|
||||
def concrete(self):
|
||||
return all((self.platform is not None,
|
||||
isinstance(self.platform, Platform),
|
||||
self.os is not None,
|
||||
isinstance(self.os, OperatingSystem),
|
||||
self.target is not None, isinstance(self.target, Target)))
|
||||
return all(
|
||||
(self.platform is not None,
|
||||
isinstance(self.platform, spack.platforms.Platform),
|
||||
self.os is not None,
|
||||
isinstance(self.os, spack.operating_systems.OperatingSystem),
|
||||
self.target is not None, isinstance(self.target, spack.target.Target))
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
if self.platform or self.os or self.target:
|
||||
@@ -429,28 +120,28 @@ def __nonzero__(self):
|
||||
__bool__ = __nonzero__
|
||||
|
||||
def _cmp_iter(self):
|
||||
if isinstance(self.platform, Platform):
|
||||
if isinstance(self.platform, spack.platforms.Platform):
|
||||
yield self.platform.name
|
||||
else:
|
||||
yield self.platform
|
||||
|
||||
if isinstance(self.os, OperatingSystem):
|
||||
if isinstance(self.os, spack.operating_systems.OperatingSystem):
|
||||
yield self.os.name
|
||||
else:
|
||||
yield self.os
|
||||
|
||||
if isinstance(self.target, Target):
|
||||
if isinstance(self.target, spack.target.Target):
|
||||
yield self.target.microarchitecture
|
||||
else:
|
||||
yield self.target
|
||||
|
||||
def to_dict(self):
|
||||
str_or_none = lambda v: str(v) if v else None
|
||||
d = syaml_dict([
|
||||
d = syaml.syaml_dict([
|
||||
('platform', str_or_none(self.platform)),
|
||||
('platform_os', str_or_none(self.os)),
|
||||
('target', self.target.to_dict_or_value())])
|
||||
return syaml_dict([('arch', d)])
|
||||
return syaml.syaml_dict([('arch', d)])
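For orientation, the resulting mapping has roughly this shape for a generic target (values are illustrative):

    # {'arch': {'platform': 'linux',
    #           'platform_os': 'ubuntu18.04',
    #           'target': 'x86_64'}}
    # non-generic targets replace the 'target' string with the micro-architecture dict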
|
||||
|
||||
def to_spec(self):
|
||||
"""Convert this Arch to an anonymous Spec with architecture defined."""
|
||||
@@ -464,64 +155,25 @@ def from_dict(d):
|
||||
return arch_for_spec(spec)
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def get_platform(platform_name):
|
||||
"""Returns a platform object that corresponds to the given name."""
|
||||
platform_list = all_platforms()
|
||||
for p in platform_list:
|
||||
if platform_name.replace("_", "").lower() == p.__name__.lower():
|
||||
return p()
|
||||
|
||||
|
||||
def verify_platform(platform_name):
|
||||
""" Determines whether or not the platform with the given name is supported
|
||||
in Spack. For more information, see the 'spack.platforms' submodule.
|
||||
"""
|
||||
platform_name = platform_name.replace("_", "").lower()
|
||||
platform_names = [p.__name__.lower() for p in all_platforms()]
|
||||
|
||||
if platform_name not in platform_names:
|
||||
tty.die("%s is not a supported platform; supported platforms are %s" %
|
||||
(platform_name, platform_names))
|
||||
|
||||
|
||||
def arch_for_spec(arch_spec):
|
||||
"""Transforms the given architecture spec into an architecture object."""
|
||||
arch_spec = spack.spec.ArchSpec(arch_spec)
|
||||
assert arch_spec.concrete
|
||||
|
||||
arch_plat = get_platform(arch_spec.platform)
|
||||
arch_plat = spack.platforms.by_name(arch_spec.platform)
|
||||
if not (arch_plat.operating_system(arch_spec.os) and
|
||||
arch_plat.target(arch_spec.target)):
|
||||
raise ValueError(
|
||||
"Can't recreate arch for spec %s on current arch %s; "
|
||||
"spec architecture is too different" % (arch_spec, sys_type()))
|
||||
sys_type = str(default_arch())
|
||||
msg = ("Can't recreate arch for spec {0} on current arch {1}; "
|
||||
"spec architecture is too different")
|
||||
raise ValueError(msg.format(arch_spec, sys_type))
|
||||
|
||||
return Arch(arch_plat, arch_spec.os, arch_spec.target)
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def _all_platforms():
|
||||
mod_path = spack.paths.platform_path
|
||||
return spack.util.classes.list_classes("spack.platforms", mod_path)
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def _platform():
|
||||
"""Detects the platform for this machine.
|
||||
|
||||
Gather a list of all available subclasses of platforms.
|
||||
Sorts the list according to their priority. Priority is
|
||||
an arbitrarily set number. Detects platform either using uname or
|
||||
a file path (/opt/cray...)
|
||||
"""
|
||||
# Try to create a Platform object using the config file FIRST
|
||||
platform_list = _all_platforms()
|
||||
platform_list.sort(key=lambda a: a.priority)
|
||||
|
||||
for platform_cls in platform_list:
|
||||
if platform_cls.detect():
|
||||
return platform_cls()
|
||||
return spack.platforms.host()
|
||||
|
||||
|
||||
#: The "real" platform of the host running Spack. This should not be changed
|
||||
@@ -532,44 +184,23 @@ def _platform():
|
||||
#: context manager.
|
||||
platform = _platform
|
||||
|
||||
#: The list of all platform classes. May be swapped by the use_platform
|
||||
#: context manager.
|
||||
all_platforms = _all_platforms
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def default_arch():
|
||||
"""Default ``Arch`` object for this machine.
|
||||
|
||||
See ``sys_type()``.
|
||||
"""
|
||||
"""Default ``Arch`` object for this machine"""
|
||||
return Arch(platform(), 'default_os', 'default_target')
|
||||
|
||||
|
||||
def sys_type():
|
||||
"""Print out the "default" platform-os-target tuple for this machine.
|
||||
|
||||
On machines with only one target OS/target, prints out the
|
||||
platform-os-target for the frontend. For machines with a frontend
|
||||
and a backend, prints the default backend.
|
||||
|
||||
TODO: replace with use of more explicit methods to get *all* the
|
||||
backends, as client code should really be aware of cross-compiled
|
||||
architectures.
|
||||
|
||||
"""
|
||||
return str(default_arch())
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def compatible_sys_types():
|
||||
"""Returns a list of all the systypes compatible with the current host."""
|
||||
compatible_archs = []
|
||||
"""Return a list of all the platform-os-target tuples compatible
|
||||
with the current host.
|
||||
"""
|
||||
current_host = archspec.cpu.host()
|
||||
compatible_targets = [current_host] + current_host.ancestors
|
||||
for target in compatible_targets:
|
||||
arch = Arch(platform(), 'default_os', target)
|
||||
compatible_archs.append(str(arch))
|
||||
compatible_archs = [
|
||||
str(Arch(platform(), 'default_os', target)) for target in compatible_targets
|
||||
]
|
||||
return compatible_archs
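On a typical x86_64 Linux host the returned list looks roughly like this (names illustrative):

    # ['linux-ubuntu18.04-skylake',
    #  'linux-ubuntu18.04-broadwell',
    #  ...,
    #  'linux-ubuntu18.04-x86_64']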
|
||||
|
||||
|
||||
@@ -587,23 +218,25 @@ def __call__(self):
|
||||
|
||||
@contextlib.contextmanager
|
||||
def use_platform(new_platform):
|
||||
global platform, all_platforms
|
||||
global platform
|
||||
|
||||
msg = '"{0}" must be an instance of Platform'
|
||||
assert isinstance(new_platform, Platform), msg.format(new_platform)
|
||||
assert isinstance(new_platform, spack.platforms.Platform), msg.format(new_platform)
|
||||
|
||||
original_platform_fn, original_all_platforms_fn = platform, all_platforms
|
||||
platform = _PickleableCallable(new_platform)
|
||||
all_platforms = _PickleableCallable([type(new_platform)])
|
||||
original_platform_fn = platform
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
try:
|
||||
platform = _PickleableCallable(new_platform)
|
||||
|
||||
yield new_platform
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
platform, all_platforms = original_platform_fn, original_all_platforms_fn
|
||||
yield new_platform
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
finally:
|
||||
platform = original_platform_fn
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
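A hedged sketch of how this context manager is typically exercised, e.g. in tests; the Test platform class is an assumption about what spack.platforms exposes:

    import spack.architecture
    import spack.platforms

    # Temporarily swap in another platform, then fall back to the real host platform
    with spack.architecture.use_platform(spack.platforms.Test()):
        print(spack.architecture.platform())  # the Test platform

    print(spack.architecture.platform())      # the real host platform again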
|
||||
|
@@ -37,12 +37,16 @@ def _search_duplicate_compilers(error_cls):
|
||||
"""
|
||||
import collections
|
||||
import itertools
|
||||
import re
|
||||
|
||||
from six.moves.urllib.request import urlopen
|
||||
|
||||
try:
|
||||
from collections.abc import Sequence # novm
|
||||
except ImportError:
|
||||
from collections import Sequence
|
||||
|
||||
|
||||
#: Map an audit tag to a list of callables implementing checks
|
||||
CALLBACKS = {}
|
||||
|
||||
@@ -261,6 +265,45 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
kwargs=('pkgs',)
|
||||
)
|
||||
|
||||
#: Sanity checks on linting
|
||||
# This can take some time, so it's run separately from packages
|
||||
package_https_directives = AuditClass(
|
||||
group='packages-https',
|
||||
tag='PKG-HTTPS-DIRECTIVES',
|
||||
description='Sanity checks on https checks of package urls, etc.',
|
||||
kwargs=('pkgs',)
|
||||
)
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links
|
||||
"""
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
|
||||
# Does the homepage have http, and if so, does https work?
|
||||
if pkg.homepage.startswith('http://'):
|
||||
https = re.sub("http", "https", pkg.homepage, 1)
|
||||
try:
|
||||
response = urlopen(https)
|
||||
except Exception as e:
|
||||
msg = 'Error with attempting https for "{0}": '
|
||||
errors.append(error_cls(msg.format(pkg.name), [str(e)]))
|
||||
continue
|
||||
|
||||
if response.getcode() == 200:
|
||||
msg = 'Package "{0}" uses http but has a valid https endpoint.'
|
||||
errors.append(msg.format(pkg.name))
|
||||
|
||||
return llnl.util.lang.dedupe(errors)
|
||||
|
||||
|
||||
@package_directives
|
||||
def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
|
@@ -13,6 +13,7 @@
|
||||
import sys
|
||||
import tarfile
|
||||
import tempfile
|
||||
import traceback
|
||||
from contextlib import closing
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
@@ -27,7 +28,10 @@
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
import spack.platforms
|
||||
import spack.relocate as relocate
|
||||
import spack.util.file_cache as file_cache
|
||||
import spack.util.gpg
|
||||
@@ -204,7 +208,7 @@ def find_built_spec(self, spec):
|
||||
The cache can be updated by calling ``update()`` on the cache.
|
||||
|
||||
Args:
|
||||
spec (Spec): Concrete spec to find
|
||||
spec (spack.spec.Spec): Concrete spec to find
|
||||
|
||||
Returns:
|
||||
A list of objects containing the found specs and mirror url where
|
||||
@@ -581,7 +585,7 @@ def get_buildfile_manifest(spec):
|
||||
added = True
|
||||
|
||||
if relocate.needs_binary_relocation(m_type, m_subtype):
|
||||
if ((m_subtype in ('x-executable', 'x-sharedlib')
|
||||
if ((m_subtype in ('x-executable', 'x-sharedlib', 'x-pie-executable')
|
||||
and sys.platform != 'darwin') or
|
||||
(m_subtype in ('x-mach-binary')
|
||||
and sys.platform == 'darwin') or
|
||||
@@ -613,9 +617,8 @@ def write_buildinfo_file(spec, workdir, rel=False):
|
||||
prefix_to_hash[str(d.prefix)] = d.dag_hash()
|
||||
|
||||
# Create buildinfo data and write it to disk
|
||||
import spack.hooks.sbang as sbang
|
||||
buildinfo = {}
|
||||
buildinfo['sbang_install_path'] = sbang.sbang_install_path()
|
||||
buildinfo['sbang_install_path'] = spack.hooks.sbang.sbang_install_path()
|
||||
buildinfo['relative_rpaths'] = rel
|
||||
buildinfo['buildpath'] = spack.store.layout.root
|
||||
buildinfo['spackprefix'] = spack.paths.prefix
|
||||
@@ -706,20 +709,14 @@ def generate_package_index(cache_prefix):
|
||||
"""Create the build cache index page.
|
||||
|
||||
Creates (or replaces) the "index.json" page at the location given in
|
||||
cache_prefix. This page contains a link for each binary package (.yaml)
|
||||
under cache_prefix.
|
||||
cache_prefix. This page contains a link for each binary package (.yaml or
|
||||
.json) under cache_prefix.
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
enable_transaction_locking=False,
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
file_list = (
|
||||
entry
|
||||
for entry in web_util.list_url(cache_prefix)
|
||||
if entry.endswith('.yaml'))
|
||||
if entry.endswith('.yaml') or entry.endswith('spec.json'))
|
||||
except KeyError as inst:
|
||||
msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
|
||||
tty.warn(msg)
|
||||
@@ -733,24 +730,97 @@ def generate_package_index(cache_prefix):
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
|
||||
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
|
||||
cache_prefix))
|
||||
|
||||
all_mirror_specs = {}
|
||||
|
||||
for file_path in file_list:
|
||||
try:
|
||||
yaml_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(yaml_url))
|
||||
_, _, yaml_file = web_util.read_from_url(yaml_url)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
# yaml_obj = syaml.load(yaml_contents)
|
||||
# s = Spec.from_yaml(yaml_obj)
|
||||
s = Spec.from_yaml(yaml_contents)
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
spec_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
spec_dict = sjson.load(spec_file_contents)
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
spec_dict = syaml.load(spec_file_contents)
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
all_mirror_specs[s.dag_hash()] = {
|
||||
'spec_url': spec_url,
|
||||
'spec': s,
|
||||
'num_deps': len(list(s.traverse(root=False))),
|
||||
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
|
||||
'buildinfo': spec_dict['buildinfo'],
|
||||
}
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading spec.yaml: {0}'.format(file_path))
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
sorted_specs = sorted(all_mirror_specs.keys(),
|
||||
key=lambda k: all_mirror_specs[k]['num_deps'])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
enable_transaction_locking=False,
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
tty.debug('Specs sorted by number of dependencies:')
|
||||
for dag_hash in sorted_specs:
|
||||
spec_record = all_mirror_specs[dag_hash]
|
||||
s = spec_record['spec']
|
||||
num_deps = spec_record['num_deps']
|
||||
tty.debug(' {0}/{1} -> {2}'.format(
|
||||
s.name, dag_hash[:7], num_deps))
|
||||
if num_deps > 0:
|
||||
# Check each of this spec's dependencies (which we have already
|
||||
# processed), as they are the source of truth for their own
|
||||
# full hash. If the full hash we have for any deps does not
|
||||
# match what those deps have themselves, then we need to splice
|
||||
# this spec with those deps, and push this spliced spec
|
||||
# (spec.json file) back to the mirror, as well as update the
|
||||
# all_mirror_specs dictionary with this spliced spec.
|
||||
to_splice = []
|
||||
for dep in s.dependencies():
|
||||
dep_dag_hash = dep.dag_hash()
|
||||
if dep_dag_hash in all_mirror_specs:
|
||||
true_dep = all_mirror_specs[dep_dag_hash]['spec']
|
||||
if true_dep.full_hash() != dep.full_hash():
|
||||
to_splice.append(true_dep)
|
||||
|
||||
if to_splice:
|
||||
tty.debug(' needs the following deps spliced:')
|
||||
for true_dep in to_splice:
|
||||
tty.debug(' {0}/{1}'.format(
|
||||
true_dep.name, true_dep.dag_hash()[:7]))
|
||||
s = s.splice(true_dep, True)
|
||||
|
||||
# Push this spliced spec back to the mirror
|
||||
spliced_spec_dict = s.to_dict(hash=ht.full_hash)
|
||||
for key in ['binary_cache_checksum', 'buildinfo']:
|
||||
spliced_spec_dict[key] = spec_record[key]
|
||||
|
||||
temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
|
||||
with open(temp_json_path, 'w') as fd:
|
||||
fd.write(sjson.dump(spliced_spec_dict))
|
||||
|
||||
spliced_spec_url = spec_record['spec_url']
|
||||
web_util.push_to_url(
|
||||
temp_json_path, spliced_spec_url, keep_original=False)
|
||||
tty.debug(' spliced and wrote {0}'.format(
|
||||
spliced_spec_url))
|
||||
spec_record['spec'] = s
|
||||
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
|
||||
# Now that we have fixed any old specfiles that might have had the wrong
|
||||
# full hash for their dependencies, we can generate the index, compute
|
||||
# the hash, and push those files to the mirror.
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
@@ -782,6 +852,7 @@ def generate_package_index(cache_prefix):
|
||||
msg = 'Encountered problem pushing package index to {0}: {1}'.format(
|
||||
cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
tty.debug('\n' + traceback.format_exc())
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
@@ -883,19 +954,27 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# need to copy the spec file so the build cache can be downloaded
|
||||
# without concretizing with the current spack packages
|
||||
# and preferences
|
||||
spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.realpath(
|
||||
os.path.join(cache_prefix, specfile_name))
|
||||
|
||||
spec_file = spack.store.layout.spec_file_path(spec)
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
||||
deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')
|
||||
|
||||
remote_specfile_path = url_util.join(
|
||||
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
|
||||
remote_specfile_path_deprecated = url_util.join(
|
||||
outdir, os.path.relpath(deprecated_specfile_path,
|
||||
os.path.realpath(tmpdir)))
|
||||
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
if force:
|
||||
# If force and exists, overwrite. Otherwise raise exception on collision.
|
||||
if force:
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
web_util.remove_url(remote_specfile_path)
|
||||
else:
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
if web_util.url_exists(remote_specfile_path_deprecated):
|
||||
web_util.remove_url(remote_specfile_path_deprecated)
|
||||
elif (web_util.url_exists(remote_specfile_path) or
|
||||
web_util.url_exists(remote_specfile_path_deprecated)):
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
# make a copy of the install directory to work with
|
||||
workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
|
||||
@@ -943,15 +1022,23 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
# add sha256 checksum to spec.yaml
|
||||
# add sha256 checksum to spec.json
|
||||
|
||||
with open(spec_file, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
spec_dict = yaml.load(content)
|
||||
if spec_file.endswith('.yaml'):
|
||||
spec_dict = yaml.load(content)
|
||||
elif spec_file.endswith('.json'):
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
raise ValueError(
|
||||
'{0} not a valid spec file type (json or yaml)'.format(
|
||||
spec_file))
|
||||
bchecksum = {}
|
||||
bchecksum['hash_algorithm'] = 'sha256'
|
||||
bchecksum['hash'] = checksum
|
||||
spec_dict['binary_cache_checksum'] = bchecksum
|
||||
# Add original install prefix relative to layout root to spec.yaml.
|
||||
# Add original install prefix relative to layout root to spec.json.
|
||||
# This will be used to determine if the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo['relative_prefix'] = os.path.relpath(
|
||||
@@ -960,7 +1047,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
spec_dict['buildinfo'] = buildinfo
|
||||
|
||||
with open(specfile_path, 'w') as outfile:
|
||||
outfile.write(syaml.dump(spec_dict))
|
||||
outfile.write(sjson.dump(spec_dict))
|
||||
|
||||
# sign the tarball and spec file with gpg
|
||||
if not unsigned:
|
||||
@@ -1014,14 +1101,14 @@ def download_tarball(spec, preferred_mirrors=None):
|
||||
path to downloaded tarball if successful, None otherwise.
|
||||
|
||||
Args:
|
||||
spec (Spec): Concrete spec
|
||||
spec (spack.spec.Spec): Concrete spec
|
||||
preferred_mirrors (list): If provided, this is a list of preferred
|
||||
mirror urls. Other configured mirrors will only be used if the
|
||||
tarball can't be retrieved from one of these.
|
||||
mirror urls. Other configured mirrors will only be used if the
|
||||
tarball can't be retrieved from one of these.
|
||||
|
||||
Returns:
|
||||
Path to the downloaded tarball, or ``None`` if the tarball could not
|
||||
be downloaded from any configured mirrors.
|
||||
be downloaded from any configured mirrors.
|
||||
"""
|
||||
if not spack.mirror.MirrorCollection():
|
||||
tty.die("Please add a spack mirror to allow " +
|
||||
@@ -1070,7 +1157,7 @@ def make_package_relative(workdir, spec, allow_root):
|
||||
orig_path_names.append(os.path.join(prefix, filename))
|
||||
cur_path_names.append(os.path.join(workdir, filename))
|
||||
|
||||
platform = spack.architecture.get_platform(spec.platform)
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if 'macho' in platform.binary_formats:
|
||||
relocate.make_macho_binaries_relative(
|
||||
cur_path_names, orig_path_names, old_layout_root)
|
||||
@@ -1104,8 +1191,6 @@ def relocate_package(spec, allow_root):
|
||||
"""
|
||||
Relocate the given package
|
||||
"""
|
||||
import spack.hooks.sbang as sbang
|
||||
|
||||
workdir = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
new_layout_root = str(spack.store.layout.root)
|
||||
@@ -1144,7 +1229,8 @@ def relocate_package(spec, allow_root):
|
||||
prefix_to_prefix_bin = OrderedDict({})
|
||||
|
||||
if old_sbang_install_path:
|
||||
prefix_to_prefix_text[old_sbang_install_path] = sbang.sbang_install_path()
|
||||
install_path = spack.hooks.sbang.sbang_install_path()
|
||||
prefix_to_prefix_text[old_sbang_install_path] = install_path
|
||||
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
prefix_to_prefix_bin[old_prefix] = new_prefix
|
||||
@@ -1158,7 +1244,7 @@ def relocate_package(spec, allow_root):
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
# will have the old sbang location in their shebangs.
|
||||
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix)
|
||||
new_sbang = sbang.sbang_shebang_line()
|
||||
new_sbang = spack.hooks.sbang.sbang_shebang_line()
|
||||
prefix_to_prefix_text[orig_sbang] = new_sbang
|
||||
|
||||
tty.debug("Relocating package from",
|
||||
@@ -1182,7 +1268,7 @@ def is_backup_file(file):
|
||||
]
|
||||
# If the buildcache was not created with relativized rpaths
|
||||
# do the relocation of path in binaries
|
||||
platform = spack.architecture.get_platform(spec.platform)
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if 'macho' in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(files_to_relocate,
|
||||
old_layout_root,
|
||||
@@ -1241,15 +1327,26 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
tarfile_name = tarball_name(spec, '.tar.gz')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(tmpdir, specfile_name)
|
||||
|
||||
specfile_is_json = True
|
||||
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
|
||||
json_name = tarball_name(spec, '.spec.json')
|
||||
json_path = os.path.join(tmpdir, json_name)
|
||||
with closing(tarfile.open(spackfile_path, 'r')) as tar:
|
||||
tar.extractall(tmpdir)
|
||||
# some buildcache tarfiles use bzip2 compression
|
||||
if not os.path.exists(tarfile_path):
|
||||
tarfile_name = tarball_name(spec, '.tar.bz2')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
|
||||
if os.path.exists(json_path):
|
||||
specfile_path = json_path
|
||||
elif os.path.exists(deprecated_yaml_path):
|
||||
specfile_is_json = False
|
||||
specfile_path = deprecated_yaml_path
|
||||
else:
|
||||
raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
|
||||
|
||||
if not unsigned:
|
||||
if os.path.exists('%s.asc' % specfile_path):
|
||||
try:
|
||||
@@ -1272,7 +1369,10 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spec_dict = {}
|
||||
with open(specfile_path, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
spec_dict = syaml.load(content)
|
||||
if specfile_is_json:
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
spec_dict = syaml.load(content)
|
||||
bchecksum = spec_dict['binary_cache_checksum']
|
||||
|
||||
# if the checksums don't match don't install
|
||||
@@ -1349,27 +1449,39 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
"""
|
||||
Try to find the spec directly on the configured mirrors
|
||||
"""
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_is_json = True
|
||||
lenient = not full_hash_match
|
||||
found_specs = []
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||
buildcache_fetch_url = url_util.join(
|
||||
buildcache_fetch_url_yaml = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
|
||||
buildcache_fetch_url_json = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, specfile_name)
|
||||
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url)
|
||||
fetched_spec_yaml = codecs.getreader('utf-8')(fs).read()
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err:
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url), url_err)
|
||||
continue
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_json), url_err)
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_yaml), url_err_y)
|
||||
continue
|
||||
specfile_contents = codecs.getreader('utf-8')(fs).read()
|
||||
|
||||
# read the spec from the build cache file. All specs in build caches
|
||||
# are concrete (as they are built) so we need to mark this spec
|
||||
# concrete on read-in.
|
||||
fetched_spec = Spec.from_yaml(fetched_spec_yaml)
|
||||
if specfile_is_json:
|
||||
fetched_spec = Spec.from_json(specfile_contents)
|
||||
else:
|
||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
# Do not recompute the full hash for the fetched spec, instead just
|
||||
@@ -1390,14 +1502,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
indicating the mirrors on which it can be found
|
||||
|
||||
Args:
|
||||
spec (Spec): The spec to look for in binary mirrors
|
||||
spec (spack.spec.Spec): The spec to look for in binary mirrors
|
||||
full_hash_match (bool): If True, only includes mirrors where the spec
|
||||
full hash matches the locally computed full hash of the ``spec``
|
||||
argument. If False, any mirror which has a matching DAG hash
|
||||
is included in the results.
|
||||
mirrors_to_check (dict): Optionally override the configured mirrors
|
||||
with the mirrors in this dictionary.
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.yaml``
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.json``
|
||||
files from remote mirrors, only consider the indices.
|
||||
|
||||
Return:
|
||||
@@ -1594,57 +1706,91 @@ def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
|
||||
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
|
||||
tty.debug(spec.tree())
|
||||
|
||||
# Try to retrieve the .spec.yaml directly, based on the known
|
||||
# Try to retrieve the specfile directly, based on the known
|
||||
# format of the name, in order to determine if the package
|
||||
# needs to be rebuilt.
|
||||
cache_prefix = build_cache_prefix(mirror_url)
|
||||
spec_yaml_file_name = tarball_name(spec, '.spec.yaml')
|
||||
file_path = os.path.join(cache_prefix, spec_yaml_file_name)
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||
deprecated_specfile_path = os.path.join(cache_prefix,
|
||||
deprecated_specfile_name)
|
||||
|
||||
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
|
||||
spec.short_spec, '' if rebuild_on_errors else 'not ')
|
||||
|
||||
try:
|
||||
_, _, yaml_file = web_util.read_from_url(file_path)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
_, _, spec_file = web_util.read_from_url(specfile_path)
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(spec.short_spec, file_path))
|
||||
tty.debug(url_err)
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError) as url_err_y:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1} or {2}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(
|
||||
spec.short_spec,
|
||||
specfile_path,
|
||||
deprecated_specfile_path))
|
||||
tty.debug(url_err)
|
||||
tty.debug(url_err_y)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
if not spec_file_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(
|
||||
specfile_path if specfile_is_json else deprecated_specfile_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
if not yaml_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(file_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
spec_dict = (sjson.load(spec_file_contents)
|
||||
if specfile_is_json else syaml.load(spec_file_contents))
|
||||
|
||||
spec_yaml = syaml.load(yaml_contents)
|
||||
|
||||
yaml_spec = spec_yaml['spec']
|
||||
try:
|
||||
nodes = spec_dict['spec']['nodes']
|
||||
except KeyError:
|
||||
# Prior node dict format omitted 'nodes' key
|
||||
nodes = spec_dict['spec']
|
||||
name = spec.name
|
||||
|
||||
# The "spec" key in the yaml is a list of objects, each with a single
|
||||
# In the old format:
|
||||
# The "spec" key represents a list of objects, each with a single
|
||||
# key that is the package name. While the list usually just contains
|
||||
# a single object, we iterate over the list looking for the object
|
||||
# with the name of this concrete spec as a key, out of an abundance
|
||||
# of caution.
|
||||
cached_pkg_specs = [item[name] for item in yaml_spec if name in item]
|
||||
# In format version 2:
|
||||
# ['spec']['nodes'] is still a list of objects, but with a
|
||||
# multitude of keys. The list will commonly contain many objects, and in the
|
||||
# case of build specs, it is highly likely that the same name will occur
|
||||
# once as the actual package, and then again as the build provenance of that
|
||||
# same package. Hence format version 2 matches on the dag hash, not name.
|
||||
if nodes and 'name' not in nodes[0]:
|
||||
# old style
|
||||
cached_pkg_specs = [item[name] for item in nodes if name in item]
|
||||
elif nodes and spec_dict['spec']['_meta']['version'] == 2:
|
||||
cached_pkg_specs = [item for item in nodes
|
||||
if item[ht.dag_hash.name] == spec.dag_hash()]
|
||||
cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
|
||||
|
||||
# If either the full_hash didn't exist in the .spec.yaml file, or it
|
||||
# If either the full_hash didn't exist in the specfile, or it
|
||||
# did, but didn't match the one we computed locally, then we should
|
||||
# just rebuild. This can be simplified once the dag_hash and the
|
||||
# full_hash become the same thing.
|
||||
rebuild = False
|
||||
if not cached_target or 'full_hash' not in cached_target:
|
||||
reason = 'full_hash was missing from remote spec.yaml'
|
||||
|
||||
if not cached_target:
|
||||
reason = 'did not find spec in specfile contents'
|
||||
rebuild = True
|
||||
elif ht.full_hash.name not in cached_target:
|
||||
reason = 'full_hash was missing from remote specfile'
|
||||
rebuild = True
|
||||
else:
|
||||
full_hash = cached_target['full_hash']
|
||||
full_hash = cached_target[ht.full_hash.name]
|
||||
if full_hash != pkg_full_hash:
|
||||
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
|
||||
full_hash, pkg_full_hash)
|
||||
@@ -1667,11 +1813,11 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
|
||||
Arguments:
|
||||
mirrors (dict): Mirrors to check against
|
||||
specs (iterable): Specs to check against mirrors
|
||||
output_file (string): Path to output file to be written. If provided,
|
||||
specs (typing.Iterable): Specs to check against mirrors
|
||||
output_file (str): Path to output file to be written. If provided,
|
||||
mirrors with missing or out-of-date specs will be formatted as a
|
||||
JSON object and written to this file.
|
||||
rebuild_on_errors (boolean): Treat any errors encountered while
|
||||
rebuild_on_errors (bool): Treat any errors encountered while
|
||||
checking specs as a signal to rebuild package.
|
||||
|
||||
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
|
||||
@@ -1706,24 +1852,23 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
|
||||
def _download_buildcache_entry(mirror_root, descriptions):
|
||||
for description in descriptions:
|
||||
description_url = os.path.join(mirror_root, description['url'])
|
||||
path = description['path']
|
||||
fail_if_missing = description['required']
|
||||
|
||||
mkdirp(path)
|
||||
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
|
||||
try:
|
||||
stage.fetch()
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
fail_if_missing = description['required']
|
||||
for url in description['url']:
|
||||
description_url = os.path.join(mirror_root, url)
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
try:
|
||||
stage.fetch()
|
||||
break
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
else:
|
||||
if fail_if_missing:
|
||||
tty.error('Failed to download required url {0}'.format(
|
||||
description_url))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
@@ -2,8 +2,14 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import fnmatch
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
try:
|
||||
@@ -18,15 +24,292 @@
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.architecture
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
import spack.environment
|
||||
import spack.main
|
||||
import spack.modules
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
|
||||
#: Map a bootstrapper type to the corresponding class
|
||||
_bootstrap_methods = {}
|
||||
|
||||
|
||||
def _bootstrapper(type):
|
||||
"""Decorator to register classes implementing bootstrapping
|
||||
methods.
|
||||
|
||||
Args:
|
||||
type (str): string identifying the class
|
||||
"""
|
||||
def _register(cls):
|
||||
_bootstrap_methods[type] = cls
|
||||
return cls
|
||||
return _register
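An illustrative registration using this decorator (the class here is a stub, not one of the real bootstrappers):

    @_bootstrapper(type='illustrative')
    class _IllustrativeBootstrapper(object):
        def __init__(self, conf):
            self.conf = conf

    # _bootstrap_methods now maps 'illustrative' to the class above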
|
||||
|
||||
|
||||
def _try_import_from_store(module, abstract_spec_str):
|
||||
"""Return True if the module can be imported from an already
|
||||
installed spec, False otherwise.
|
||||
|
||||
Args:
|
||||
module: Python module to be imported
|
||||
abstract_spec_str: abstract spec that may provide the module
|
||||
"""
|
||||
bincache_platform = spack.architecture.real_platform()
if str(bincache_platform) == 'cray':
bincache_platform = spack.platforms.linux.Linux()
with spack.architecture.use_platform(bincache_platform):
abstract_spec_str = str(spack.spec.Spec(abstract_spec_str))

# We have to run as part of this python interpreter
abstract_spec_str += ' ^' + spec_for_current_python()

installed_specs = spack.store.db.query(abstract_spec_str, installed=True)

for candidate_spec in installed_specs:
lib_spd = candidate_spec['python'].package.default_site_packages_dir
lib64_spd = lib_spd.replace('lib/', 'lib64/')
module_paths = [
os.path.join(candidate_spec.prefix, lib_spd),
os.path.join(candidate_spec.prefix, lib64_spd)
]
sys.path.extend(module_paths)

try:
_fix_ext_suffix(candidate_spec)
if _python_import(module):
msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
'provides the "{0}" Python module').format(
module, abstract_spec_str, candidate_spec.dag_hash()
)
tty.debug(msg)
return True
except Exception as e:
msg = ('unexpected error while trying to import module '
'"{0}" from spec "{1}" [error="{2}"]')
tty.warn(msg.format(module, candidate_spec, str(e)))
else:
msg = "Spec {0} did not provide module {1}"
tty.warn(msg.format(candidate_spec, module))

sys.path = sys.path[:-2]

return False


def _fix_ext_suffix(candidate_spec):
"""Fix the external suffixes of Python extensions on the fly for
platforms that may need it

Args:
candidate_spec (Spec): installed spec with a Python module
to be checked.
"""
# Here we map target families to the patterns expected
# by pristine CPython. Only architectures with known issues
# are included. Known issues:
#
# [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
#
_suffix_to_be_checked = {
'ppc64le': {
'glob': '*.cpython-*-powerpc64le-linux-gnu.so',
're': r'.cpython-[\w]*-powerpc64le-linux-gnu.so',
'fmt': r'{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so'
}
}

# If the current architecture is not problematic return
generic_target = archspec.cpu.host().family
if str(generic_target) not in _suffix_to_be_checked:
return

# If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
# the expectations, return since the package is surely good
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
return

expected = _suffix_to_be_checked[str(generic_target)]
if fnmatch.fnmatch(ext_suffix, expected['glob']):
return

# If we are here it means the current interpreter expects different names
# than pristine CPython. So:
# 1. Find what we have installed
# 2. Create symbolic links for the other names, if they're not there already

# Check if standard names are installed and if we have to create
# link for this interpreter
standard_extensions = fs.find(candidate_spec.prefix, expected['glob'])
link_names = [re.sub(expected['re'], ext_suffix, s) for s in standard_extensions]
for file_name, link_name in zip(standard_extensions, link_names):
if os.path.exists(link_name):
continue
os.symlink(file_name, link_name)

# Check if this interpreter installed something and we have to create
# links for a standard CPython interpreter
non_standard_extensions = fs.find(candidate_spec.prefix, '*' + ext_suffix)
for abs_path in non_standard_extensions:
directory, filename = os.path.split(abs_path)
module = filename.split('.')[0]
link_name = os.path.join(directory, expected['fmt'].format(
module=module, major=sys.version_info[0], minor=sys.version_info[1])
)
if os.path.exists(link_name):
continue
os.symlink(abs_path, link_name)

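For context, a hedged illustration of the mismatch this function works around (the vendor-specific suffix shown is an assumption; see the issue linked above): pristine CPython on ppc64le names extensions with the powerpc64le spelling, while some patched interpreters report a different EXT_SUFFIX, so a module built under one interpreter is invisible to the other until both spellings exist.

import sysconfig
# Pristine CPython produces names matching the glob checked above, e.g.
#   foo.cpython-36m-powerpc64le-linux-gnu.so
# A patched vendor interpreter may report a different, vendor-flavored value
# here (illustrative), which is why the code symlinks both spellings.
print(sysconfig.get_config_var('EXT_SUFFIX'))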
@_bootstrapper(type='buildcache')
|
||||
class _BuildcacheBootstrapper(object):
|
||||
"""Install the software needed during bootstrapping from a buildcache."""
|
||||
def __init__(self, conf):
|
||||
self.name = conf['name']
|
||||
self.url = conf['info']['url']
|
||||
|
||||
def try_import(self, module, abstract_spec_str):
|
||||
if _try_import_from_store(module, abstract_spec_str):
|
||||
return True
|
||||
|
||||
# Try to install from an unsigned binary cache
|
||||
abstract_spec = spack.spec.Spec(
|
||||
abstract_spec_str + ' ^' + spec_for_current_python()
|
||||
)
|
||||
|
||||
# On Cray we want to use Linux binaries if available from mirrors
|
||||
bincache_platform = spack.architecture.real_platform()
|
||||
if str(bincache_platform) == 'cray':
|
||||
bincache_platform = spack.platforms.Linux()
|
||||
with spack.architecture.use_platform(bincache_platform):
|
||||
abstract_spec = spack.spec.Spec(
|
||||
abstract_spec_str + ' ^' + spec_for_current_python()
|
||||
)
|
||||
|
||||
# Read information on verified clingo binaries
|
||||
json_filename = '{0}.json'.format(module)
|
||||
json_path = os.path.join(
|
||||
spack.paths.share_path, 'bootstrap', self.name, json_filename
|
||||
)
|
||||
with open(json_path) as f:
|
||||
data = json.load(f)
|
||||
|
||||
buildcache = spack.main.SpackCommand('buildcache')
|
||||
# Ensure we see only the buildcache being used to bootstrap
|
||||
mirror_scope = spack.config.InternalConfigScope(
|
||||
'bootstrap', {'mirrors:': {self.name: self.url}}
|
||||
)
|
||||
with spack.config.override(mirror_scope):
|
||||
# This index is currently needed to get the compiler used to build some
|
||||
# specs that we know by dag hash.
|
||||
spack.binary_distribution.binary_index.regenerate_spec_cache()
|
||||
index = spack.binary_distribution.update_cache_and_get_specs()
|
||||
for item in data['verified']:
|
||||
candidate_spec = item['spec']
|
||||
python_spec = item['python']
|
||||
# Skip specs which are not compatible
|
||||
if not abstract_spec.satisfies(candidate_spec):
|
||||
continue
|
||||
|
||||
if python_spec not in abstract_spec:
|
||||
continue
|
||||
|
||||
for pkg_name, pkg_hash, pkg_sha256 in item['binaries']:
|
||||
msg = ('[BOOTSTRAP MODULE {0}] Try installing "{1}" from binary '
|
||||
'cache at "{2}"')
|
||||
tty.debug(msg.format(module, pkg_name, self.url))
|
||||
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
|
||||
# Reconstruct the compiler that we need to use for bootstrapping
|
||||
compiler_entry = {
|
||||
"modules": [],
|
||||
"operating_system": str(index_spec.os),
|
||||
"paths": {
|
||||
"cc": "/dev/null",
|
||||
"cxx": "/dev/null",
|
||||
"f77": "/dev/null",
|
||||
"fc": "/dev/null"
|
||||
},
|
||||
"spec": str(index_spec.compiler),
|
||||
"target": str(index_spec.target.family)
|
||||
}
|
||||
with spack.architecture.use_platform(bincache_platform):
|
||||
with spack.config.override(
|
||||
'compilers', [{'compiler': compiler_entry}]
|
||||
):
|
||||
spec_str = '/' + pkg_hash
|
||||
install_args = [
|
||||
'install',
|
||||
'--sha256', pkg_sha256,
|
||||
'-a', '-u', '-o', '-f', spec_str
|
||||
]
|
||||
buildcache(*install_args, fail_on_error=False)
|
||||
# TODO: undo installations that didn't complete?
|
||||
|
||||
if _try_import_from_store(module, abstract_spec_str):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@_bootstrapper(type='install')
|
||||
class _SourceBootstrapper(object):
|
||||
"""Install the software needed during bootstrapping from sources."""
|
||||
def __init__(self, conf):
|
||||
self.conf = conf
|
||||
|
||||
@staticmethod
|
||||
def try_import(module, abstract_spec_str):
|
||||
if _try_import_from_store(module, abstract_spec_str):
|
||||
return True
|
||||
|
||||
# Try to build and install from sources
|
||||
with spack_python_interpreter():
|
||||
# Add hint to use frontend operating system on Cray
|
||||
if str(spack.architecture.platform()) == 'cray':
|
||||
abstract_spec_str += ' os=fe'
|
||||
|
||||
concrete_spec = spack.spec.Spec(
|
||||
abstract_spec_str + ' ^' + spec_for_current_python()
|
||||
)
|
||||
|
||||
if module == 'clingo':
|
||||
# TODO: remove when the old concretizer is deprecated
|
||||
concrete_spec._old_concretize()
|
||||
else:
|
||||
concrete_spec.concretize()
|
||||
|
||||
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
|
||||
tty.debug(msg.format(module, abstract_spec_str))
|
||||
|
||||
# Install the spec that should make the module importable
|
||||
concrete_spec.package.do_install()
|
||||
|
||||
return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)
|
||||
|
||||
|
||||
def _make_bootstrapper(conf):
|
||||
"""Return a bootstrap object built according to the
|
||||
configuration argument
|
||||
"""
|
||||
btype = conf['type']
|
||||
return _bootstrap_methods[btype](conf)
|
||||
|
||||
|
||||
def _source_is_trusted(conf):
|
||||
trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
|
||||
if name not in trusted:
|
||||
return False
|
||||
return trusted[name]
|
||||
|
||||
|
||||
def spec_for_current_python():
|
||||
"""For bootstrapping purposes we are just interested in the Python
|
||||
@@ -53,7 +336,7 @@ def spack_python_interpreter():
|
||||
which Spack is currently running as the only Python external spec
|
||||
available.
|
||||
"""
|
||||
python_prefix = os.path.dirname(os.path.dirname(sys.executable))
|
||||
python_prefix = sys.exec_prefix
|
||||
external_python = spec_for_current_python()
|
||||
|
||||
entry = {
|
||||
@@ -67,69 +350,58 @@ def spack_python_interpreter():
|
||||
yield
|
||||
|
||||
|
||||
def make_module_available(module, spec=None, install=False):
|
||||
"""Ensure module is importable"""
|
||||
# If we already can import it, that's great
|
||||
try:
|
||||
__import__(module)
|
||||
def ensure_module_importable_or_raise(module, abstract_spec=None):
|
||||
"""Make the requested module available for import, or raise.
|
||||
|
||||
This function tries to import a Python module in the current interpreter
|
||||
using, in order, the methods configured in bootstrap.yaml.
|
||||
|
||||
If none of the methods succeed, an exception is raised. The function exits
|
||||
on first success.
|
||||
|
||||
Args:
|
||||
module (str): module to be imported in the current interpreter
|
||||
abstract_spec (str): abstract spec that might provide the module. If not
|
||||
given it defaults to "module"
|
||||
|
||||
Raises:
|
||||
ImportError: if the module couldn't be imported
|
||||
"""
|
||||
# If we can import it already, that's great
|
||||
tty.debug("[BOOTSTRAP MODULE {0}] Try importing from Python".format(module))
|
||||
if _python_import(module):
|
||||
return
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# If it's already installed, use it
|
||||
# Search by spec
|
||||
spec = spack.spec.Spec(spec or module)
|
||||
abstract_spec = abstract_spec or module
|
||||
source_configs = spack.config.get('bootstrap:sources', [])
|
||||
for current_config in source_configs:
|
||||
if not _source_is_trusted(current_config):
|
||||
msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
|
||||
'not trusted').format(module, current_config['name'])
|
||||
tty.debug(msg)
|
||||
continue
|
||||
|
||||
# We have to run as part of this python
|
||||
# We can constrain by a shortened version in place of a version range
|
||||
# because this spec is only used for querying or as a placeholder to be
|
||||
# replaced by an external that already has a concrete version. This syntax
|
||||
# is not sufficient when concretizing without an external, as it will
|
||||
# concretize to python@X.Y instead of python@X.Y.Z
|
||||
python_requirement = '^' + spec_for_current_python()
|
||||
spec.constrain(python_requirement)
|
||||
installed_specs = spack.store.db.query(spec, installed=True)
|
||||
|
||||
for ispec in installed_specs:
|
||||
# TODO: make sure run-environment is appropriate
|
||||
module_path = os.path.join(ispec.prefix,
|
||||
ispec['python'].package.site_packages_dir)
|
||||
module_path_64 = module_path.replace('/lib/', '/lib64/')
|
||||
b = _make_bootstrapper(current_config)
|
||||
try:
|
||||
sys.path.append(module_path)
|
||||
sys.path.append(module_path_64)
|
||||
__import__(module)
|
||||
return
|
||||
except ImportError:
|
||||
tty.warn("Spec %s did not provide module %s" % (ispec, module))
|
||||
sys.path = sys.path[:-2]
|
||||
if b.try_import(module, abstract_spec):
|
||||
return
|
||||
except Exception as e:
|
||||
msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
|
||||
tty.debug(msg.format(module, str(e)))
|
||||
|
||||
def _raise_error(module_name, module_spec):
|
||||
error_msg = 'cannot import module "{0}"'.format(module_name)
|
||||
if module_spec:
|
||||
error_msg += ' from spec "{0}'.format(module_spec)
|
||||
raise ImportError(error_msg)
|
||||
# We couldn't import in any way, so raise an import error
|
||||
msg = 'cannot bootstrap the "{0}" Python module'.format(module)
|
||||
if abstract_spec:
|
||||
msg += ' from spec "{0}"'.format(abstract_spec)
|
||||
raise ImportError(msg)
|
||||
|
||||
if not install:
|
||||
_raise_error(module, spec)
|
||||
|
||||
with spack_python_interpreter():
|
||||
# We will install for ourselves, using this python if needed
|
||||
# Concretize the spec
|
||||
spec.concretize()
|
||||
spec.package.do_install()
|
||||
|
||||
module_path = os.path.join(spec.prefix,
|
||||
spec['python'].package.site_packages_dir)
|
||||
module_path_64 = module_path.replace('/lib/', '/lib64/')
|
||||
def _python_import(module):
|
||||
try:
|
||||
sys.path.append(module_path)
|
||||
sys.path.append(module_path_64)
|
||||
__import__(module)
|
||||
return
|
||||
except ImportError:
|
||||
sys.path = sys.path[:-2]
|
||||
_raise_error(module, spec)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_executable(exe, spec=None, install=False):
|
||||
@@ -137,13 +409,14 @@ def get_executable(exe, spec=None, install=False):
|
||||
|
||||
Args:
|
||||
exe (str): needed executable name
|
||||
spec (Spec or str): spec to search for exe in (default exe)
|
||||
spec (spack.spec.Spec or str): spec to search for exe in (default exe)
|
||||
install (bool): install spec if not available
|
||||
|
||||
When ``install`` is True, Spack will use the python used to run Spack as an
|
||||
external. The ``install`` option should only be used with packages that
|
||||
install quickly (when using external python) or are guaranteed by Spack
|
||||
organization to be in a binary mirror (clingo)."""
|
||||
organization to be in a binary mirror (clingo).
|
||||
"""
|
||||
# Search the system first
|
||||
runner = spack.util.executable.which(exe)
|
||||
if runner:
|
||||
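As a usage note, a caller might use this helper roughly as below; the executable and package names are illustrative, not taken from this diff. The system PATH is searched first, and only with install=True does Spack concretize and install the providing spec, reusing the running Python as an external.

# Hedged sketch: prefer a system binary, otherwise build the named package and
# return an Executable wrapping the installed binary.
patchelf = get_executable('patchelf', spec='patchelf', install=True)
patchelf('--version')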
@@ -216,15 +489,35 @@ def _bootstrap_config_scopes():
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ensure_bootstrap_configuration():
|
||||
with spack.architecture.use_platform(spack.architecture.real_platform()):
|
||||
with spack.repo.use_repositories(spack.paths.packages_path):
|
||||
with spack.store.use_store(spack.paths.user_bootstrap_store):
|
||||
# Default configuration scopes excluding command line
|
||||
# and builtin but accounting for platform specific scopes
|
||||
config_scopes = _bootstrap_config_scopes()
|
||||
with spack.config.use_configuration(*config_scopes):
|
||||
with spack_python_interpreter():
|
||||
yield
|
||||
bootstrap_store_path = store_path()
|
||||
with spack.environment.deactivate_environment():
|
||||
with spack.architecture.use_platform(spack.architecture.real_platform()):
|
||||
with spack.repo.use_repositories(spack.paths.packages_path):
|
||||
with spack.store.use_store(bootstrap_store_path):
|
||||
# Default configuration scopes excluding command line
|
||||
# and builtin but accounting for platform specific scopes
|
||||
config_scopes = _bootstrap_config_scopes()
|
||||
with spack.config.use_configuration(*config_scopes):
|
||||
with spack.modules.disable_modules():
|
||||
with spack_python_interpreter():
|
||||
yield
|
||||
|
||||
|
||||
def store_path():
|
||||
"""Path to the store used for bootstrapped software"""
|
||||
enabled = spack.config.get('bootstrap:enable', True)
|
||||
if not enabled:
|
||||
msg = ('bootstrapping is currently disabled. '
|
||||
'Use "spack bootstrap enable" to enable it')
|
||||
raise RuntimeError(msg)
|
||||
|
||||
bootstrap_root_path = spack.config.get(
|
||||
'bootstrap:root', spack.paths.user_bootstrap_path
|
||||
)
|
||||
bootstrap_store_path = spack.util.path.canonicalize_path(
|
||||
os.path.join(bootstrap_root_path, 'store')
|
||||
)
|
||||
return bootstrap_store_path
|
||||
|
||||
|
||||
def clingo_root_spec():
|
||||
@@ -238,14 +531,17 @@ def clingo_root_spec():
|
||||
else:
|
||||
spec_str += ' %gcc'
|
||||
|
||||
# Add hint to use frontend operating system on Cray
|
||||
if str(spack.architecture.platform()) == 'cray':
|
||||
spec_str += ' os=fe'
|
||||
|
||||
# Add the generic target
|
||||
generic_target = archspec.cpu.host().family
|
||||
spec_str += ' target={0}'.format(str(generic_target))
|
||||
|
||||
tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))
|
||||
|
||||
return spack.spec.Spec(spec_str)
|
||||
return spec_str
|
||||
|
||||
|
||||
def ensure_clingo_importable_or_raise():
|
||||
"""Ensure that the clingo module is available for import."""
|
||||
ensure_module_importable_or_raise(
|
||||
module='clingo', abstract_spec=clingo_root_spec()
|
||||
)
|
||||
|
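Taken together, the new bootstrap module is meant to be driven roughly as in the sketch below, assuming the module lands as spack.bootstrap and no extra configuration is present; this mirrors the call sites elsewhere in this changeset rather than documenting a stable API.

import spack.bootstrap

# Swap in the bootstrap store, repositories, platform and config scopes, then
# make sure clingo is importable, fetching or building it only if needed.
with spack.bootstrap.ensure_bootstrap_configuration():
    spack.bootstrap.ensure_clingo_importable_or_raise()
import clingo  # now importable in the current interpreter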
@@ -61,6 +61,7 @@
|
||||
import spack.schema.environment
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.user_environment
|
||||
import spack.util.path
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.util.cpus import cpus_available
|
||||
@@ -69,6 +70,7 @@
|
||||
env_flag,
|
||||
filter_system_paths,
|
||||
get_path,
|
||||
inspect_path,
|
||||
is_system_path,
|
||||
preserve_environment,
|
||||
system_dirs,
|
||||
@@ -455,11 +457,11 @@ def determine_number_of_jobs(
|
||||
cap to the number of CPUs available to avoid oversubscription.
|
||||
|
||||
Parameters:
|
||||
parallel (bool): true when package supports parallel builds
|
||||
command_line (int/None): command line override
|
||||
config_default (int/None): config default number of jobs
|
||||
max_cpus (int/None): maximum number of CPUs available. When None, this
|
||||
value is automatically determined.
|
||||
parallel (bool or None): true when package supports parallel builds
|
||||
command_line (int or None): command line override
|
||||
config_default (int or None): config default number of jobs
|
||||
max_cpus (int or None): maximum number of CPUs available. When None, this
|
||||
value is automatically determined.
|
||||
"""
|
||||
if not parallel:
|
||||
return 1
|
||||
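A short worked example of the capping rule described in the docstring, with illustrative numbers and assuming the implementation follows the documented behavior:

# Parallel build, no command-line override, config default of 16 jobs, but only
# 8 CPUs visible: the result is capped at 8 to avoid oversubscription.
jobs = determine_number_of_jobs(parallel=True, command_line=None,
                                config_default=16, max_cpus=8)
assert jobs == 8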
@@ -685,14 +687,14 @@ def get_std_cmake_args(pkg):
|
||||
"""List of standard arguments used if a package is a CMakePackage.
|
||||
|
||||
Returns:
|
||||
list of str: standard arguments that would be used if this
|
||||
list: standard arguments that would be used if this
|
||||
package were a CMakePackage instance.
|
||||
|
||||
Args:
|
||||
pkg (PackageBase): package under consideration
|
||||
pkg (spack.package.PackageBase): package under consideration
|
||||
|
||||
Returns:
|
||||
list of str: arguments for cmake
|
||||
list: arguments for cmake
|
||||
"""
|
||||
return spack.build_systems.cmake.CMakePackage._std_args(pkg)
|
||||
|
||||
@@ -701,14 +703,14 @@ def get_std_meson_args(pkg):
|
||||
"""List of standard arguments used if a package is a MesonPackage.
|
||||
|
||||
Returns:
|
||||
list of str: standard arguments that would be used if this
|
||||
list: standard arguments that would be used if this
|
||||
package were a MesonPackage instance.
|
||||
|
||||
Args:
|
||||
pkg (PackageBase): package under consideration
|
||||
pkg (spack.package.PackageBase): package under consideration
|
||||
|
||||
Returns:
|
||||
list of str: arguments for meson
|
||||
list: arguments for meson
|
||||
"""
|
||||
return spack.build_systems.meson.MesonPackage._std_args(pkg)
|
||||
|
||||
@@ -738,7 +740,7 @@ def load_external_modules(pkg):
|
||||
associated with them.
|
||||
|
||||
Args:
|
||||
pkg (PackageBase): package to load deps for
|
||||
pkg (spack.package.PackageBase): package to load deps for
|
||||
"""
|
||||
for dep in list(pkg.spec.traverse()):
|
||||
external_modules = dep.external_modules or []
|
||||
@@ -781,6 +783,13 @@ def setup_package(pkg, dirty, context='build'):
|
||||
"config to assume that the package is part of the system"
|
||||
" includes and omit it when invoked with '--cflags'.")
|
||||
elif context == 'test':
|
||||
env.extend(
|
||||
inspect_path(
|
||||
pkg.spec.prefix,
|
||||
spack.user_environment.prefix_inspections(pkg.spec.platform),
|
||||
exclude=is_system_path
|
||||
)
|
||||
)
|
||||
pkg.setup_run_environment(env)
|
||||
env.prepend_path('PATH', '.')
|
||||
|
||||
@@ -864,7 +873,7 @@ def modifications_from_dependencies(spec, context, custom_mods_only=True):
|
||||
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
|
||||
|
||||
Args:
|
||||
spec (Spec): spec for which we want the modifications
|
||||
spec (spack.spec.Spec): spec for which we want the modifications
|
||||
context (str): either 'build' for build-time modifications or 'run'
|
||||
for run-time modifications
|
||||
"""
|
||||
@@ -1062,9 +1071,9 @@ def start_build_process(pkg, function, kwargs):
|
||||
|
||||
Args:
|
||||
|
||||
pkg (PackageBase): package whose environment we should set up the
|
||||
pkg (spack.package.PackageBase): package whose environment we should set up the
|
||||
child process for.
|
||||
function (callable): argless function to run in the child
|
||||
function (typing.Callable): argless function to run in the child
|
||||
process.
|
||||
|
||||
Usage::
|
||||
@@ -1149,7 +1158,7 @@ def get_package_context(traceback, context=3):
|
||||
"""Return some context for an error message when the build fails.
|
||||
|
||||
Args:
|
||||
traceback (traceback): A traceback from some exception raised during
|
||||
traceback: A traceback from some exception raised during
|
||||
install
|
||||
|
||||
context (int): Lines of context to show before and after the line
|
||||
|
@@ -30,7 +30,7 @@ class AutotoolsPackage(PackageBase):
|
||||
|
||||
They all have sensible defaults and for many packages the only thing
|
||||
necessary will be to override the helper method
|
||||
:py:meth:`~.AutotoolsPackage.configure_args`.
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
|
||||
For a finer tuning you may also override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
@@ -331,7 +331,7 @@ def flags_to_build_system_args(self, flags):
|
||||
|
||||
def configure(self, spec, prefix):
|
||||
"""Runs configure with the arguments specified in
|
||||
:py:meth:`~.AutotoolsPackage.configure_args`
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
|
||||
and an appropriately set prefix.
|
||||
"""
|
||||
options = getattr(self, 'configure_flag_args', [])
|
||||
@@ -376,8 +376,8 @@ def _activate_or_not(
|
||||
activation_value=None
|
||||
):
|
||||
"""This function contains the current implementation details of
|
||||
:py:meth:`~.AutotoolsPackage.with_or_without` and
|
||||
:py:meth:`~.AutotoolsPackage.enable_or_disable`.
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
|
||||
|
||||
Args:
|
||||
name (str): name of the variant that is being processed
|
||||
@@ -385,7 +385,7 @@ def _activate_or_not(
|
||||
case of ``with_or_without``)
|
||||
deactivation_word (str): the default deactivation word ('without'
|
||||
in the case of ``with_or_without``)
|
||||
activation_value (callable): callable that accepts a single
|
||||
activation_value (typing.Callable): callable that accepts a single
|
||||
value. This value is either one of the allowed values for a
|
||||
multi-valued variant or the name of a bool-valued variant.
|
||||
Returns the parameter to be used when the value is activated.
|
||||
@@ -420,7 +420,7 @@ def _activate_or_not(
|
||||
for ``<spec-name> foo=x +bar``
|
||||
|
||||
Returns:
|
||||
list of strings that corresponds to the activation/deactivation
|
||||
list: list of strings that corresponds to the activation/deactivation
|
||||
of the variant that has been processed
|
||||
|
||||
Raises:
|
||||
@@ -501,7 +501,7 @@ def with_or_without(self, name, activation_value=None):
|
||||
|
||||
Args:
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (callable): callable that accepts a single
|
||||
activation_value (typing.Callable): callable that accepts a single
|
||||
value and returns the parameter to be used leading to an entry
|
||||
of the type ``--with-{name}={parameter}``.
|
||||
|
||||
@@ -514,12 +514,13 @@ def with_or_without(self, name, activation_value=None):
|
||||
return self._activate_or_not(name, 'with', 'without', activation_value)
|
||||
|
||||
def enable_or_disable(self, name, activation_value=None):
|
||||
"""Same as :py:meth:`~.AutotoolsPackage.with_or_without` but substitute
|
||||
``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
"""Same as
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
|
||||
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
|
||||
Args:
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (callable): if present accepts a single value
|
||||
activation_value (typing.Callable): if present accepts a single value
|
||||
and returns the parameter to be used leading to an entry of the
|
||||
type ``--enable-{name}={parameter}``
|
||||
|
||||
|
@@ -108,21 +108,6 @@ def initconfig_compiler_entries(self):
|
||||
if fflags:
|
||||
entries.append(cmake_cache_string("CMAKE_Fortran_FLAGS", fflags))
|
||||
|
||||
# Override XL compiler family
|
||||
familymsg = ("Override to proper compiler family for XL")
|
||||
if "xlf" in (self.compiler.fc or ''): # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_Fortran_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
if "xlc" in self.compiler.cc: # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_C_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
if "xlC" in self.compiler.cxx: # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_CXX_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
|
||||
return entries
|
||||
|
||||
def initconfig_mpi_entries(self):
|
||||
|
@@ -236,7 +236,7 @@ def define_from_variant(self, cmake_var, variant=None):
|
||||
of ``cmake_var``.
|
||||
|
||||
This utility function is similar to
|
||||
:py:meth:`~.AutotoolsPackage.with_or_without`.
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
|
||||
|
||||
Examples:
|
||||
|
||||
@@ -254,9 +254,9 @@ def define_from_variant(self, cmake_var, variant=None):
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[define_from_variant('BUILD_SHARED_LIBS', 'shared'),
|
||||
define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
|
||||
define_from_variant('SWR')]
|
||||
[self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
|
||||
self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
|
||||
self.define_from_variant('SWR')]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
|
@@ -5,6 +5,7 @@
|
||||
|
||||
import spack.variant
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
from spack.package import PackageBase
|
||||
|
||||
|
||||
@@ -85,92 +86,104 @@ def cuda_flags(arch_list):
|
||||
# apply to platform=darwin. We currently do not provide conflicts for
|
||||
# platform=darwin with %apple-clang.
|
||||
|
||||
# GCC
|
||||
# According to
|
||||
# https://github.com/spack/spack/pull/25054#issuecomment-886531664
|
||||
# these conflicts are valid independently from the architecture
|
||||
|
||||
# minimum supported versions
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0:')
|
||||
conflicts('%gcc@:5', when='+cuda ^cuda@11.4:')
|
||||
|
||||
# maximum supported version
|
||||
# NOTE:
|
||||
# in order to not constrain future cuda version to old gcc versions,
|
||||
# it has been decided to use an upper bound for the latest version.
|
||||
# This implies that the last one in the list has to be updated at
|
||||
# each release of a new cuda minor version.
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4')
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
|
||||
|
||||
# Linux x86_64 compiler conflicts from here:
|
||||
# https://gist.github.com/ax3l/9489132
|
||||
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
|
||||
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=x86_64:')
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
|
||||
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
|
||||
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
|
||||
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
|
||||
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
|
||||
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
|
||||
conflicts('%pgi@:17,20:',
|
||||
when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
|
||||
conflicts('%pgi@:17,21:',
|
||||
when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
|
||||
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4:',
|
||||
when='+cuda ^cuda@8.0:9.0 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4.1:',
|
||||
when='+cuda ^cuda@9.1 target=x86_64:')
|
||||
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
|
||||
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
|
||||
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
|
||||
conflicts('%clang@:3.7,8.1:',
|
||||
when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
|
||||
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
|
||||
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
|
||||
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
|
||||
with when('~allow-unsupported-compilers'):
|
||||
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
|
||||
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
|
||||
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
|
||||
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
|
||||
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
|
||||
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
|
||||
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
|
||||
conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
|
||||
conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
|
||||
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
|
||||
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
|
||||
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
|
||||
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
|
||||
conflicts('%clang@:3.7,8.1:',
|
||||
when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
|
||||
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
|
||||
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
|
||||
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
|
||||
|
||||
# x86_64 vs. ppc64le differ according to NVidia docs
|
||||
# Linux ppc64le compiler conflicts from Table from the docs below:
|
||||
# https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html
|
||||
# x86_64 vs. ppc64le differ according to NVidia docs
|
||||
# Linux ppc64le compiler conflicts from Table from the docs below:
|
||||
# https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
|
||||
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html
|
||||
|
||||
# information prior to CUDA 9 difficult to find
|
||||
conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
|
||||
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=ppc64le:')
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
|
||||
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
|
||||
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
|
||||
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
|
||||
conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
|
||||
conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
|
||||
conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
|
||||
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
|
||||
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
|
||||
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
|
||||
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
|
||||
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
|
||||
# information prior to CUDA 9 difficult to find
|
||||
conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
|
||||
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
|
||||
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
|
||||
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
|
||||
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
|
||||
conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
|
||||
conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
|
||||
conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
|
||||
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
|
||||
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
|
||||
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
|
||||
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
|
||||
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
|
||||
|
||||
# Intel is mostly relevant for x86_64 Linux, even though it also
|
||||
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
|
||||
conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
|
||||
conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
|
||||
conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
|
||||
conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
|
||||
conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
|
||||
# Intel 15.x is compatible with CUDA 7 thru current CUDA
|
||||
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
|
||||
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
|
||||
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
|
||||
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
|
||||
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
|
||||
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')
|
||||
# Intel is mostly relevant for x86_64 Linux, even though it also
|
||||
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
|
||||
conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
|
||||
conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
|
||||
conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
|
||||
conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
|
||||
conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
|
||||
# Intel 15.x is compatible with CUDA 7 thru current CUDA
|
||||
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
|
||||
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
|
||||
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
|
||||
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
|
||||
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
|
||||
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')
|
||||
|
||||
# XL is mostly relevant for ppc64le Linux
|
||||
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
|
||||
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
|
||||
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')
|
||||
# XL is mostly relevant for ppc64le Linux
|
||||
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
|
||||
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
|
||||
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')
|
||||
|
||||
# Darwin.
|
||||
# TODO: add missing conflicts for %apple-clang cuda@:10
|
||||
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')
|
||||
# Darwin.
|
||||
# TODO: add missing conflicts for %apple-clang cuda@:10
|
||||
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
|
||||
|
||||
# Make sure cuda_arch can not be used without +cuda
|
||||
for value in cuda_arch_values:
|
||||
|
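The main structural change above is that the compiler guards now live inside a when('~allow-unsupported-compilers') block, so they only apply while that variant is off. A minimal sketch of the same directive-grouping pattern, using a hypothetical package and one conflict taken from the list above:

from spack.directives import conflicts
from spack.multimethod import when

class MyCudaApp(CudaPackage):
    # Directives declared inside a `when(...)` context are conditional on it;
    # installing with +allow-unsupported-compilers skips these guards.
    with when('~allow-unsupported-compilers'):
        conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0')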
@@ -368,7 +368,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
|
||||
toplevel psxevars.sh or equivalent file to source (and thus by
|
||||
the modulefiles that Spack produces).
|
||||
|
||||
version_globs (list of str): Suffix glob patterns (most specific
|
||||
version_globs (list): Suffix glob patterns (most specific
|
||||
first) expected to qualify suite_dir_name to its fully
|
||||
version-specific install directory (as opposed to a
|
||||
compatibility directory or symlink).
|
||||
|
@@ -69,6 +69,9 @@ def install(self, spec, prefix, installer_path=None):
|
||||
|
||||
# Installer writes files in ~/intel set HOME so it goes to prefix
|
||||
bash.add_default_env('HOME', prefix)
|
||||
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
|
||||
bash.add_default_env('XDG_RUNTIME_DIR',
|
||||
join_path(self.stage.path, 'runtime'))
|
||||
|
||||
bash(installer_path,
|
||||
'-s', '-a', '-s', '--action', 'install',
|
||||
|
@@ -127,24 +127,25 @@ def import_modules(self):
|
||||
list: list of strings of module names
|
||||
"""
|
||||
modules = []
|
||||
root = os.path.join(
|
||||
self.prefix,
|
||||
self.spec['python'].package.config_vars['python_lib']['false']['false'],
|
||||
)
|
||||
|
||||
# Python libraries may be installed in lib or lib64
|
||||
# See issues #18520 and #17126
|
||||
for lib in ['lib', 'lib64']:
|
||||
root = os.path.join(self.prefix, lib, 'python{0}'.format(
|
||||
self.spec['python'].version.up_to(2)), 'site-packages')
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
for path in find(root, '__init__.py', recursive=True):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
os.sep + '__init__.py', '').replace('/', '.'))
|
||||
# Some Python libraries are modules: individual *.py files
|
||||
# found in the site-packages directory
|
||||
for path in find(root, '*.py', recursive=False):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
'.py', '').replace('/', '.'))
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
for path in find(root, '__init__.py', recursive=True):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
os.sep + '__init__.py', '').replace('/', '.'))
|
||||
|
||||
# Some Python libraries are modules: individual *.py files
|
||||
# found in the site-packages directory
|
||||
for path in find(root, '*.py', recursive=False):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
'.py', '').replace('/', '.'))
|
||||
|
||||
tty.debug('Detected the following modules: {0}'.format(modules))
|
||||
|
||||
return modules
|
||||
|
||||
def setup_file(self):
|
||||
@@ -254,15 +255,11 @@ def install_args(self, spec, prefix):
|
||||
# Get all relative paths since we set the root to `prefix`
|
||||
# We query the python with which these will be used for the lib and inc
|
||||
# directories. This ensures we use `lib`/`lib64` as expected by python.
|
||||
python = spec['python'].package.command
|
||||
command_start = 'print(distutils.sysconfig.'
|
||||
commands = ';'.join([
|
||||
'import distutils.sysconfig',
|
||||
command_start + 'get_python_lib(plat_specific=False, prefix=""))',
|
||||
command_start + 'get_python_lib(plat_specific=True, prefix=""))',
|
||||
command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
|
||||
pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
|
||||
'-c', commands, output=str, error=str).strip().split('\n')
|
||||
pure_site_packages_dir = spec['python'].package.config_vars[
|
||||
'python_lib']['false']['false']
|
||||
plat_site_packages_dir = spec['python'].package.config_vars[
|
||||
'python_lib']['true']['false']
|
||||
inc_dir = spec['python'].package.config_vars['python_inc']['true']
|
||||
|
||||
args += ['--root=%s' % prefix,
|
||||
'--install-purelib=%s' % pure_site_packages_dir,
|
||||
|
@@ -64,24 +64,25 @@ def import_modules(self):
|
||||
list: list of strings of module names
|
||||
"""
|
||||
modules = []
|
||||
root = os.path.join(
|
||||
self.prefix,
|
||||
self.spec['python'].package.config_vars['python_lib']['false']['false'],
|
||||
)
|
||||
|
||||
# Python libraries may be installed in lib or lib64
|
||||
# See issues #18520 and #17126
|
||||
for lib in ['lib', 'lib64']:
|
||||
root = os.path.join(self.prefix, lib, 'python{0}'.format(
|
||||
self.spec['python'].version.up_to(2)), 'site-packages')
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
for path in find(root, '__init__.py', recursive=True):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
os.sep + '__init__.py', '').replace('/', '.'))
|
||||
# Some Python libraries are modules: individual *.py files
|
||||
# found in the site-packages directory
|
||||
for path in find(root, '*.py', recursive=False):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
'.py', '').replace('/', '.'))
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
for path in find(root, '__init__.py', recursive=True):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
os.sep + '__init__.py', '').replace('/', '.'))
|
||||
|
||||
# Some Python libraries are modules: individual *.py files
|
||||
# found in the site-packages directory
|
||||
for path in find(root, '*.py', recursive=False):
|
||||
modules.append(path.replace(root + os.sep, '', 1).replace(
|
||||
'.py', '').replace('/', '.'))
|
||||
|
||||
tty.debug('Detected the following modules: {0}'.format(modules))
|
||||
|
||||
return modules
|
||||
|
||||
def python(self, *args, **kwargs):
|
||||
|
@@ -45,6 +45,8 @@
|
||||
]
|
||||
|
||||
SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
|
||||
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
|
||||
'shared_pr_mirror')
|
||||
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
|
||||
|
||||
spack_gpg = spack.main.SpackCommand('gpg')
|
||||
@@ -612,11 +614,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
'strip-compilers': False,
|
||||
})
|
||||
|
||||
# Add this mirror if it's enabled, as some specs might be up to date
|
||||
# here and thus not need to be rebuilt.
|
||||
# Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
|
||||
# be up to date in one of those and thus not need to be rebuilt.
|
||||
if pr_mirror_url:
|
||||
spack.mirror.add(
|
||||
'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
|
||||
spack.mirror.add('ci_shared_pr_mirror',
|
||||
SPACK_SHARED_PR_MIRROR_URL,
|
||||
cfg.default_modify_scope())
|
||||
|
||||
pipeline_artifacts_dir = artifacts_root
|
||||
if not pipeline_artifacts_dir:
|
||||
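To make the mirror comment above concrete, here is roughly what gets added when a per-PR mirror URL is passed in; the PR-specific URL is illustrative, while the shared URL is the constant defined earlier in this file.

# Hypothetical values for a pipeline generated for one pull request:
#   ci_pr_mirror        -> s3://spack-binaries-prs/github/pr12345   (passed in)
#   ci_shared_pr_mirror -> s3://spack-binaries-prs/shared_pr_mirror (SPACK_SHARED_PR_MIRROR_URL)
# Specs already up to date in either mirror can be pruned instead of rebuilt.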
@@ -871,6 +876,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
tty.debug(debug_msg)
|
||||
|
||||
if prune_dag and not rebuild_spec:
|
||||
tty.debug('Pruning spec that does not need to be rebuilt.')
|
||||
continue
|
||||
|
||||
# Check if this spec is in our list of known failures, now that
|
||||
@@ -917,7 +923,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
bc_root = os.path.join(
|
||||
local_mirror_dir, 'build_cache')
|
||||
artifact_paths.extend([os.path.join(bc_root, p) for p in [
|
||||
bindist.tarball_name(release_spec, '.spec.yaml'),
|
||||
bindist.tarball_name(release_spec, '.spec.json'),
|
||||
bindist.tarball_name(release_spec, '.cdashid'),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]])
|
||||
@@ -1316,17 +1322,20 @@ def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
|
||||
|
||||
request = Request(cdash_api_url, data=enc_data, headers=headers)
|
||||
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
try:
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200 and response_code != 201:
|
||||
msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
|
||||
job_build_id, dep_build_id, response_code)
|
||||
tty.warn(msg)
|
||||
return
|
||||
if response_code != 200 and response_code != 201:
|
||||
msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
|
||||
job_build_id, dep_build_id, response_code)
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
response_text = response.read()
|
||||
tty.debug('Relate builds response: {0}'.format(response_text))
|
||||
response_text = response.read()
|
||||
tty.debug('Relate builds response: {0}'.format(response_text))
|
||||
except Exception as e:
|
||||
print("Relating builds in CDash failed: {0}".format(e))
|
||||
|
||||
|
||||
def write_cdashid_to_mirror(cdashid, spec, mirror_url):
|
||||
@@ -1373,13 +1382,13 @@ def read_cdashid_from_mirror(spec, mirror_url):
|
||||
return int(contents)
|
||||
|
||||
|
||||
def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
|
||||
def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
|
||||
try:
|
||||
unsigned = not sign_binaries
|
||||
tty.debug('Creating buildcache ({0})'.format(
|
||||
'unsigned' if unsigned else 'signed'))
|
||||
spack.cmd.buildcache._createtarball(
|
||||
env, spec_yaml=yaml_path, add_deps=False,
|
||||
env, spec_file=specfile_path, add_deps=False,
|
||||
output_location=mirror_url, force=True, allow_root=True,
|
||||
unsigned=unsigned)
|
||||
except Exception as inst:
|
||||
@@ -1395,7 +1404,7 @@ def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
|
||||
# BaseException
|
||||
# object
|
||||
err_msg = 'Error msg: {0}'.format(inst)
|
||||
if 'Access Denied' in err_msg:
|
||||
if any(x in err_msg for x in ['Access Denied', 'InvalidAccessKeyId']):
|
||||
tty.msg('Permission problem writing to {0}'.format(
|
||||
mirror_url))
|
||||
tty.msg(err_msg)
|
||||
|
@@ -21,6 +21,7 @@
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.extensions
|
||||
import spack.paths
|
||||
@@ -186,29 +187,13 @@ def matching_spec_from_env(spec):
|
||||
If no matching spec is found in the environment (or if no environment is
|
||||
active), this will return the given spec but concretized.
|
||||
"""
|
||||
env = spack.environment.get_env({}, cmd_name)
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
return env.matching_spec(spec) or spec.concretized()
|
||||
else:
|
||||
return spec.concretized()
|
||||
|
||||
|
||||
def elide_list(line_list, max_num=10):
|
||||
"""Takes a long list and limits it to a smaller number of elements,
|
||||
replacing intervening elements with '...'. For example::
|
||||
|
||||
elide_list([1,2,3,4,5,6], 4)
|
||||
|
||||
gives::
|
||||
|
||||
[1, 2, 3, '...', 6]
|
||||
"""
|
||||
if len(line_list) > max_num:
|
||||
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
|
||||
else:
|
||||
return line_list
|
||||
|
||||
|
||||
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
|
||||
"""Given a spec, figure out which installed package it refers to.
|
||||
|
||||
@@ -216,10 +201,10 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
|
||||
spec (spack.spec.Spec): a spec to disambiguate
|
||||
env (spack.environment.Environment): a spack environment,
|
||||
if one is active, or None if no environment is active
|
||||
local (boolean, default False): do not search chained spack instances
|
||||
installed (boolean or any, or spack.database.InstallStatus or iterable
|
||||
of spack.database.InstallStatus): install status argument passed to
|
||||
database query. See ``spack.database.Database._query`` for details.
|
||||
local (bool): do not search chained spack instances
|
||||
installed (bool or spack.database.InstallStatus or typing.Iterable):
|
||||
install status argument passed to database query.
|
||||
See ``spack.database.Database._query`` for details.
|
||||
"""
|
||||
hashes = env.all_hashes() if env else None
|
||||
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
|
||||
@@ -231,11 +216,11 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
|
||||
|
||||
Arguments:
|
||||
spec (spack.spec.Spec): a spec to disambiguate
|
||||
hashes (iterable): a set of hashes of specs among which to disambiguate
|
||||
local (boolean, default False): do not search chained spack instances
|
||||
installed (boolean or any, or spack.database.InstallStatus or iterable
|
||||
of spack.database.InstallStatus): install status argument passed to
|
||||
database query. See ``spack.database.Database._query`` for details.
|
||||
hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
|
||||
local (bool): do not search chained spack instances
|
||||
installed (bool or spack.database.InstallStatus or typing.Iterable):
|
||||
install status argument passed to database query.
|
||||
See ``spack.database.Database._query`` for details.
|
||||
"""
|
||||
if local:
|
||||
matching_specs = spack.store.db.query_local(spec, hashes=hashes,
|
||||
@@ -277,14 +262,14 @@ def display_specs_as_json(specs, deps=False):
|
||||
if spec.dag_hash() in seen:
|
||||
continue
|
||||
seen.add(spec.dag_hash())
|
||||
records.append(spec.to_record_dict())
|
||||
records.append(spec.to_node_dict())
|
||||
|
||||
if deps:
|
||||
for dep in spec.traverse():
|
||||
if dep.dag_hash() in seen:
|
||||
continue
|
||||
seen.add(dep.dag_hash())
|
||||
records.append(dep.to_record_dict())
|
||||
records.append(dep.to_node_dict())
|
||||
|
||||
sjson.dump(records, sys.stdout)
|
||||
|
||||
@@ -333,9 +318,8 @@ def display_specs(specs, args=None, **kwargs):
|
||||
namespace.
|
||||
|
||||
Args:
|
||||
specs (list of spack.spec.Spec): the specs to display
|
||||
args (optional argparse.Namespace): namespace containing
|
||||
formatting arguments
|
||||
specs (list): the specs to display
|
||||
args (argparse.Namespace or None): namespace containing formatting arguments
|
||||
|
||||
Keyword Args:
|
||||
paths (bool): Show paths with each displayed spec
|
||||
@@ -348,9 +332,9 @@ def display_specs(specs, args=None, **kwargs):
|
||||
indent (int): indent each line this much
|
||||
groups (bool): display specs grouped by arch/compiler (default True)
|
||||
decorators (dict): dictionary mappng specs to decorators
|
||||
header_callback (function): called at start of arch/compiler groups
|
||||
header_callback (typing.Callable): called at start of arch/compiler groups
|
||||
all_headers (bool): show headers even when arch/compiler aren't defined
|
||||
output (stream): A file object to write to. Default is ``sys.stdout``
|
||||
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
||||
|
||||
"""
|
||||
def get_arg(name, default=None):
|
||||
@@ -502,3 +486,71 @@ def extant_file(f):
|
||||
if not os.path.isfile(f):
|
||||
raise argparse.ArgumentTypeError('%s does not exist' % f)
|
||||
return f
|
||||
|
||||
|
||||
def require_active_env(cmd_name):
|
||||
"""Used by commands to get the active environment
|
||||
|
||||
If an environment is not found, print an error message that says the calling
|
||||
command *needs* an active environment.
|
||||
|
||||
Arguments:
|
||||
cmd_name (str): name of calling command
|
||||
|
||||
Returns:
|
||||
(spack.environment.Environment): the active environment
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
|
||||
if env:
|
||||
return env
|
||||
else:
|
||||
tty.die(
|
||||
'`spack %s` requires an environment' % cmd_name,
|
||||
'activate an environment first:',
|
||||
' spack env activate ENV',
|
||||
'or use:',
|
||||
' spack -e ENV %s ...' % cmd_name)
|
||||
|
||||
|
||||
def find_environment(args):
|
||||
"""Find active environment from args or environment variable.
|
||||
|
||||
Check for an environment in this order:
|
||||
1. via ``spack -e ENV`` or ``spack -D DIR`` (arguments)
|
||||
2. via a path in the spack.environment.spack_env_var environment variable.
|
||||
|
||||
If an environment is found, read it in. If not, return None.
|
||||
|
||||
Arguments:
|
||||
args (argparse.Namespace): argparse namespace with command arguments
|
||||
|
||||
Returns:
|
||||
(spack.environment.Environment): a found environment, or ``None``
|
||||
"""
|
||||
|
||||
# treat env as a name
|
||||
env = args.env
|
||||
if env:
|
||||
if ev.exists(env):
|
||||
return ev.read(env)
|
||||
|
||||
else:
|
||||
# if env was specified, see if it is a directory otherwise, look
|
||||
# at env_dir (env and env_dir are mutually exclusive)
|
||||
env = args.env_dir
|
||||
|
||||
# if no argument, look for the environment variable
|
||||
if not env:
|
||||
env = os.environ.get(ev.spack_env_var)
|
||||
|
||||
# nothing was set; there's no active environment
|
||||
if not env:
|
||||
return None
|
||||
|
||||
# if we get here, env isn't the name of a spack environment; it has
|
||||
# to be a path to an environment, or there is something wrong.
|
||||
if ev.is_env_dir(env):
|
||||
return ev.Environment(env)
|
||||
|
||||
raise ev.SpackEnvironmentError('no environment in %s' % env)
|
||||
|
@@ -30,8 +30,7 @@ def activate(parser, args):
|
||||
if len(specs) != 1:
|
||||
tty.die("activate requires one spec. %d given." % len(specs))
|
||||
|
||||
env = ev.get_env(args, 'activate')
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], ev.active_environment())
|
||||
if not spec.package.is_extension:
|
||||
tty.die("%s is not an extension." % spec.name)
|
||||
|
||||
|
@@ -7,7 +7,6 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
|
||||
description = 'add a spec to an environment'
|
||||
section = "environments"
|
||||
@@ -22,7 +21,7 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def add(parser, args):
|
||||
env = ev.get_env(args, 'add', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='add')
|
||||
|
||||
with env.write_transaction():
|
||||
for spec in spack.cmd.parse_specs(args.specs):
|
||||
|
@@ -58,9 +58,9 @@ def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=Fals
|
||||
analyze_spec(spec, args.analyzers, args.outdir, monitor)
|
||||
|
||||
Args:
|
||||
spec (Spec): spec object of installed package
|
||||
spec (spack.spec.Spec): spec object of installed package
|
||||
analyzers (list): list of analyzer (keys) to run
|
||||
monitor (monitor.SpackMonitorClient): a monitor client
|
||||
monitor (spack.monitor.SpackMonitorClient): a monitor client
|
||||
overwrite (bool): overwrite result if already exists
|
||||
"""
|
||||
analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())
|
||||
@@ -95,7 +95,7 @@ def analyze(parser, args, **kwargs):
|
||||
sys.exit(0)
|
||||
|
||||
# handle active environment, if any
|
||||
env = ev.get_env(args, 'analyze')
|
||||
env = ev.active_environment()
|
||||
|
||||
# Get and disambiguate the spec (we should only have one)
|
||||
specs = spack.cmd.parse_specs(args.spec)
|
||||
|
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as cl
|
||||
|
||||
import spack.audit
|
||||
@@ -19,12 +20,24 @@ def setup_parser(subparser):
|
||||
# Audit configuration files
|
||||
sp.add_parser('configs', help='audit configuration files')
|
||||
|
||||
# Https and other linting
|
||||
https_parser = sp.add_parser('packages-https', help='check https in packages')
|
||||
https_parser.add_argument(
|
||||
'--all',
|
||||
action='store_true',
|
||||
default=False,
|
||||
dest='check_all',
|
||||
help="audit all packages"
|
||||
)
|
||||
|
||||
# Audit package recipes
|
||||
pkg_parser = sp.add_parser('packages', help='audit package recipes')
|
||||
pkg_parser.add_argument(
|
||||
'name', metavar='PKG', nargs='*',
|
||||
help='package to be analyzed (if none all packages will be processed)',
|
||||
)
|
||||
|
||||
for group in [pkg_parser, https_parser]:
|
||||
group.add_argument(
|
||||
'name', metavar='PKG', nargs='*',
|
||||
help='package to be analyzed (if none all packages will be processed)',
|
||||
)
|
||||
|
||||
# List all checks
|
||||
sp.add_parser('list', help='list available checks and exits')
|
||||
@@ -41,6 +54,17 @@ def packages(parser, args):
|
||||
_process_reports(reports)
|
||||
|
||||
|
||||
def packages_https(parser, args):
|
||||
|
||||
# Since auditing all packages takes a long time, --all is required when no names are given
|
||||
if not args.check_all and not args.name:
|
||||
tty.die("Please specify one or more packages to audit, or --all.")
|
||||
|
||||
pkgs = args.name or spack.repo.path.all_package_names()
|
||||
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
|
||||
_process_reports(reports)
|
||||
|
||||
|
||||
def list(parser, args):
|
||||
for subcommand, check_tags in spack.audit.GROUPS.items():
|
||||
print(cl.colorize('@*b{' + subcommand + '}:'))
|
||||
@@ -58,6 +82,7 @@ def audit(parser, args):
|
||||
subcommands = {
|
||||
'configs': configs,
|
||||
'packages': packages,
|
||||
'packages-https': packages_https,
|
||||
'list': list
|
||||
}
|
||||
subcommands[args.subcommand](parser, args)
|
||||
|
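As a rough usage sketch (not part of the diff; the package names are arbitrary), the new packages-https checks can be driven through spack.main.SpackCommand:

import spack.main

audit = spack.main.SpackCommand('audit')

audit('packages-https', 'zlib', 'curl')  # check https URLs for a few named packages
audit('packages-https', '--all')         # or audit every known package (slow)
print(audit('list'))                     # show the available check groups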
220
lib/spack/spack/cmd/bootstrap.py
Normal file
@@ -0,0 +1,220 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from __future__ import print_function
|
||||
|
||||
import os.path
|
||||
import shutil
|
||||
|
||||
import llnl.util.tty
|
||||
import llnl.util.tty.color
|
||||
|
||||
import spack.cmd.common.arguments
|
||||
import spack.config
|
||||
import spack.main
|
||||
import spack.util.path
|
||||
|
||||
description = "manage bootstrap configuration"
|
||||
section = "system"
|
||||
level = "long"
|
||||
|
||||
|
||||
def _add_scope_option(parser):
|
||||
scopes = spack.config.scopes()
|
||||
scopes_metavar = spack.config.scopes_metavar
|
||||
parser.add_argument(
|
||||
'--scope', choices=scopes, metavar=scopes_metavar,
|
||||
help="configuration scope to read/modify"
|
||||
)
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(dest='subcommand')
|
||||
|
||||
enable = sp.add_parser('enable', help='enable bootstrapping')
|
||||
_add_scope_option(enable)
|
||||
|
||||
disable = sp.add_parser('disable', help='disable bootstrapping')
|
||||
_add_scope_option(disable)
|
||||
|
||||
reset = sp.add_parser(
|
||||
'reset', help='reset bootstrapping configuration to Spack defaults'
|
||||
)
|
||||
spack.cmd.common.arguments.add_common_arguments(
|
||||
reset, ['yes_to_all']
|
||||
)
|
||||
|
||||
root = sp.add_parser(
|
||||
'root', help='get/set the root bootstrap directory'
|
||||
)
|
||||
_add_scope_option(root)
|
||||
root.add_argument(
|
||||
'path', nargs='?', default=None,
|
||||
help='set the bootstrap directory to this value'
|
||||
)
|
||||
|
||||
list = sp.add_parser(
|
||||
'list', help='list the methods available for bootstrapping'
|
||||
)
|
||||
_add_scope_option(list)
|
||||
|
||||
trust = sp.add_parser(
|
||||
'trust', help='trust a bootstrapping method'
|
||||
)
|
||||
_add_scope_option(trust)
|
||||
trust.add_argument(
|
||||
'name', help='name of the method to be trusted'
|
||||
)
|
||||
|
||||
untrust = sp.add_parser(
|
||||
'untrust', help='untrust a bootstrapping method'
|
||||
)
|
||||
_add_scope_option(untrust)
|
||||
untrust.add_argument(
|
||||
'name', help='name of the method to be untrusted'
|
||||
)
|
||||
|
||||
|
||||
def _enable_or_disable(args):
|
||||
# Set to True if we called "enable", otherwise set to false
|
||||
value = args.subcommand == 'enable'
|
||||
spack.config.set('bootstrap:enable', value, scope=args.scope)
|
||||
|
||||
|
||||
def _reset(args):
|
||||
if not args.yes_to_all:
|
||||
msg = [
|
||||
"Bootstrapping configuration is being reset to Spack's defaults. "
|
||||
"Current configuration will be lost.\n",
|
||||
"Do you want to continue?"
|
||||
]
|
||||
ok_to_continue = llnl.util.tty.get_yes_or_no(
|
||||
''.join(msg), default=True
|
||||
)
|
||||
if not ok_to_continue:
|
||||
raise RuntimeError('Aborting')
|
||||
|
||||
for scope in spack.config.config.file_scopes:
|
||||
# The default scope should stay untouched
|
||||
if scope.name == 'defaults':
|
||||
continue
|
||||
|
||||
# If we are in an env scope we can't delete a file, but the best we
|
||||
# can do is nullify the corresponding configuration
|
||||
if (scope.name.startswith('env') and
|
||||
spack.config.get('bootstrap', scope=scope.name)):
|
||||
spack.config.set('bootstrap', {}, scope=scope.name)
|
||||
continue
|
||||
|
||||
# If we are outside of an env scope delete the bootstrap.yaml file
|
||||
bootstrap_yaml = os.path.join(scope.path, 'bootstrap.yaml')
|
||||
backup_file = bootstrap_yaml + '.bkp'
|
||||
if os.path.exists(bootstrap_yaml):
|
||||
shutil.move(bootstrap_yaml, backup_file)
|
||||
|
||||
|
||||
def _root(args):
|
||||
if args.path:
|
||||
spack.config.set('bootstrap:root', args.path, scope=args.scope)
|
||||
|
||||
root = spack.config.get('bootstrap:root', default=None, scope=args.scope)
|
||||
if root:
|
||||
root = spack.util.path.canonicalize_path(root)
|
||||
print(root)
|
||||
|
||||
|
||||
def _list(args):
|
||||
sources = spack.config.get(
|
||||
'bootstrap:sources', default=None, scope=args.scope
|
||||
)
|
||||
|
||||
if not sources:
|
||||
llnl.util.tty.msg(
|
||||
"No method available for bootstrapping Spack's dependencies"
|
||||
)
|
||||
return
|
||||
|
||||
def _print_method(source, trusted):
|
||||
color = llnl.util.tty.color
|
||||
|
||||
def fmt(header, content):
|
||||
header_fmt = "@*b{{{0}:}} {1}"
|
||||
color.cprint(header_fmt.format(header, content))
|
||||
|
||||
trust_str = "@*y{UNKNOWN}"
|
||||
if trusted is True:
|
||||
trust_str = "@*g{TRUSTED}"
|
||||
elif trusted is False:
|
||||
trust_str = "@*r{UNTRUSTED}"
|
||||
|
||||
fmt("Name", source['name'] + ' ' + trust_str)
|
||||
print()
|
||||
fmt(" Type", source['type'])
|
||||
print()
|
||||
|
||||
info_lines = ['\n']
|
||||
for key, value in source.get('info', {}).items():
|
||||
info_lines.append(' ' * 4 + '@*{{{0}}}: {1}\n'.format(key, value))
|
||||
if len(info_lines) > 1:
|
||||
fmt(" Info", ''.join(info_lines))
|
||||
|
||||
description_lines = ['\n']
|
||||
for line in source['description'].split('\n'):
|
||||
description_lines.append(' ' * 4 + line + '\n')
|
||||
|
||||
fmt(" Description", ''.join(description_lines))
|
||||
|
||||
trusted = spack.config.get('bootstrap:trusted', {})
|
||||
for s in sources:
|
||||
_print_method(s, trusted.get(s['name'], None))
|
||||
|
||||
|
||||
def _write_trust_state(args, value):
|
||||
name = args.name
|
||||
sources = spack.config.get('bootstrap:sources')
|
||||
|
||||
matches = [s for s in sources if s['name'] == name]
|
||||
if not matches:
|
||||
names = [s['name'] for s in sources]
|
||||
msg = ('there is no bootstrapping method named "{0}". Valid '
|
||||
'method names are: {1}'.format(name, ', '.join(names)))
|
||||
raise RuntimeError(msg)
|
||||
|
||||
if len(matches) > 1:
|
||||
msg = ('there is more than one bootstrapping method named "{0}". '
|
||||
'Please delete all methods but one from bootstrap.yaml '
|
||||
'before proceeding').format(name)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# Setting the scope explicitly is needed to not copy over to a new scope
|
||||
# the entire default configuration for bootstrap.yaml
|
||||
scope = args.scope or spack.config.default_modify_scope('bootstrap')
|
||||
spack.config.add(
|
||||
'bootstrap:trusted:{0}:{1}'.format(name, str(value)), scope=scope
|
||||
)
|
||||
|
||||
|
||||
def _trust(args):
|
||||
_write_trust_state(args, value=True)
|
||||
msg = '"{0}" is now trusted for bootstrapping'
|
||||
llnl.util.tty.msg(msg.format(args.name))
|
||||
|
||||
|
||||
def _untrust(args):
|
||||
_write_trust_state(args, value=False)
|
||||
msg = '"{0}" is now untrusted and will not be used for bootstrapping'
|
||||
llnl.util.tty.msg(msg.format(args.name))
|
||||
|
||||
|
||||
def bootstrap(parser, args):
|
||||
callbacks = {
|
||||
'enable': _enable_or_disable,
|
||||
'disable': _enable_or_disable,
|
||||
'reset': _reset,
|
||||
'root': _root,
|
||||
'list': _list,
|
||||
'trust': _trust,
|
||||
'untrust': _untrust
|
||||
}
|
||||
callbacks[args.subcommand](args)
|
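A hypothetical session with the new command (the scope, path, and the source name 'github-actions' are assumptions, not taken from the diff):

import spack.main

bootstrap = spack.main.SpackCommand('bootstrap')

bootstrap('enable', '--scope=user')        # sets bootstrap:enable to true in the user scope
bootstrap('root', '/tmp/bootstrap-store')  # sets bootstrap:root, then prints the canonical path
print(bootstrap('list'))                   # one Name/Type/Info/Description block per source
bootstrap('trust', 'github-actions')       # records bootstrap:trusted:github-actions:True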
@@ -2,10 +2,11 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@@ -15,16 +16,20 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.relocate
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.crypto
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack.cmd import display_specs
|
||||
from spack.error import SpecError
|
||||
from spack.spec import Spec, save_dependency_spec_yamls
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
from spack.stage import Stage
|
||||
from spack.util.string import plural
|
||||
|
||||
description = "create, download and install binary packages"
|
||||
@@ -70,8 +75,9 @@ def setup_parser(subparser):
|
||||
create.add_argument('--rebuild-index', action='store_true',
|
||||
default=False, help="Regenerate buildcache index " +
|
||||
"after building package(s)")
|
||||
create.add_argument('-y', '--spec-yaml', default=None,
|
||||
help='Create buildcache entry for spec from yaml file')
|
||||
create.add_argument('--spec-file', default=None,
|
||||
help=('Create buildcache entry for spec from json or ' +
|
||||
'yaml file'))
|
||||
create.add_argument('--only', default='package,dependencies',
|
||||
dest='things_to_install',
|
||||
choices=['package', 'dependencies'],
|
||||
@@ -97,6 +103,8 @@ def setup_parser(subparser):
|
||||
install.add_argument('-o', '--otherarch', action='store_true',
|
||||
help="install specs from other architectures" +
|
||||
" instead of default platform and OS")
|
||||
# This argument is needed by the bootstrapping logic to verify checksums
|
||||
install.add_argument('--sha256', help=argparse.SUPPRESS)
|
||||
|
||||
arguments.add_common_arguments(install, ['specs'])
|
||||
install.set_defaults(func=installtarball)
|
||||
@@ -156,8 +164,9 @@ def setup_parser(subparser):
|
||||
help='Check single spec instead of release specs file')
|
||||
|
||||
check.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help='Check single spec from yaml file instead of release specs file')
|
||||
'--spec-file', default=None,
|
||||
help=('Check single spec from json or yaml file instead of release ' +
|
||||
'specs file'))
|
||||
|
||||
check.add_argument(
|
||||
'--rebuild-on-error', default=False, action='store_true',
|
||||
@@ -166,14 +175,15 @@ def setup_parser(subparser):
|
||||
|
||||
check.set_defaults(func=check_binaries)
|
||||
|
||||
# Download tarball and spec.yaml
|
||||
# Download tarball and specfile
|
||||
dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
|
||||
dltarball.add_argument(
|
||||
'-s', '--spec', default=None,
|
||||
help="Download built tarball for spec from mirror")
|
||||
dltarball.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help="Download built tarball for spec (from yaml file) from mirror")
|
||||
'--spec-file', default=None,
|
||||
help=("Download built tarball for spec (from json or yaml file) " +
|
||||
"from mirror"))
|
||||
dltarball.add_argument(
|
||||
'-p', '--path', default=None,
|
||||
help="Path to directory where tarball should be downloaded")
|
||||
@@ -189,26 +199,27 @@ def setup_parser(subparser):
|
||||
'-s', '--spec', default=None,
|
||||
help='Spec string for which buildcache name is desired')
|
||||
getbuildcachename.add_argument(
|
||||
'-y', '--spec-yaml', default=None,
|
||||
help='Path to spec yaml file for which buildcache name is desired')
|
||||
'--spec-file', default=None,
|
||||
help=('Path to spec json or yaml file for which buildcache name is ' +
|
||||
'desired'))
|
||||
getbuildcachename.set_defaults(func=get_buildcache_name)
|
||||
|
||||
# Given the root spec, save the yaml of the dependent spec to a file
|
||||
saveyaml = subparsers.add_parser('save-yaml',
|
||||
help=save_spec_yamls.__doc__)
|
||||
saveyaml.add_argument(
|
||||
savespecfile = subparsers.add_parser('save-specfile',
|
||||
help=save_specfiles.__doc__)
|
||||
savespecfile.add_argument(
|
||||
'--root-spec', default=None,
|
||||
help='Root spec of dependent spec')
|
||||
saveyaml.add_argument(
|
||||
'--root-spec-yaml', default=None,
|
||||
help='Path to yaml file containing root spec of dependent spec')
|
||||
saveyaml.add_argument(
|
||||
savespecfile.add_argument(
|
||||
'--root-specfile', default=None,
|
||||
help='Path to json or yaml file containing root spec of dependent spec')
|
||||
savespecfile.add_argument(
|
||||
'-s', '--specs', default=None,
|
||||
help='List of dependent specs for which saved yaml is desired')
|
||||
saveyaml.add_argument(
|
||||
'-y', '--yaml-dir', default=None,
|
||||
savespecfile.add_argument(
|
||||
'--specfile-dir', default=None,
|
||||
help='Path to directory where spec yamls should be saved')
|
||||
saveyaml.set_defaults(func=save_spec_yamls)
|
||||
savespecfile.set_defaults(func=save_specfiles)
|
||||
|
||||
# Copy buildcache from some directory to another mirror url
|
||||
copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
|
||||
@@ -216,13 +227,44 @@ def setup_parser(subparser):
|
||||
'--base-dir', default=None,
|
||||
help='Path to mirror directory (root of existing buildcache)')
|
||||
copy.add_argument(
|
||||
'--spec-yaml', default=None,
|
||||
help='Path to spec yaml file representing buildcache entry to copy')
|
||||
'--spec-file', default=None,
|
||||
help=('Path to spec json or yaml file representing buildcache entry to' +
|
||||
' copy'))
|
||||
copy.add_argument(
|
||||
'--destination-url', default=None,
|
||||
help='Destination mirror url')
|
||||
copy.set_defaults(func=buildcache_copy)
|
||||
|
||||
# Sync buildcache entries from one mirror to another
|
||||
sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
|
||||
source = sync.add_mutually_exclusive_group(required=True)
|
||||
source.add_argument('--src-directory',
|
||||
metavar='DIRECTORY',
|
||||
type=str,
|
||||
help="Source mirror as a local file path")
|
||||
source.add_argument('--src-mirror-name',
|
||||
metavar='MIRROR_NAME',
|
||||
type=str,
|
||||
help="Name of the source mirror")
|
||||
source.add_argument('--src-mirror-url',
|
||||
metavar='MIRROR_URL',
|
||||
type=str,
|
||||
help="URL of the source mirror")
|
||||
dest = sync.add_mutually_exclusive_group(required=True)
|
||||
dest.add_argument('--dest-directory',
|
||||
metavar='DIRECTORY',
|
||||
type=str,
|
||||
help="Destination mirror as a local file path")
|
||||
dest.add_argument('--dest-mirror-name',
|
||||
metavar='MIRROR_NAME',
|
||||
type=str,
|
||||
help="Name of the destination mirror")
|
||||
dest.add_argument('--dest-mirror-url',
|
||||
metavar='MIRROR_URL',
|
||||
type=str,
|
||||
help="URL of the destination mirror")
|
||||
sync.set_defaults(func=buildcache_sync)
|
||||
|
||||
# Update buildcache index without copying any additional packages
|
||||
update_index = subparsers.add_parser(
|
||||
'update-index', help=buildcache_update_index.__doc__)
|
||||
@@ -239,12 +281,13 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
|
||||
concretized specs given from cli
|
||||
|
||||
Args:
|
||||
pkgs (string): spec to be matched against installed packages
|
||||
pkgs (str): spec to be matched against installed packages
|
||||
allow_multiple_matches (bool): if True multiple matches are admitted
|
||||
env (Environment): active environment, or ``None`` if there is not one
|
||||
env (spack.environment.Environment or None): active environment, or ``None``
|
||||
if there is not one
|
||||
|
||||
Return:
|
||||
list of specs
|
||||
list: list of specs
|
||||
"""
|
||||
hashes = env.all_hashes() if env else None
|
||||
|
||||
@@ -328,16 +371,19 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
|
||||
return specs_from_cli
|
||||
|
||||
|
||||
def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
|
||||
def _createtarball(env, spec_file=None, packages=None, add_spec=True,
|
||||
add_deps=True, output_location=os.getcwd(),
|
||||
signing_key=None, force=False, make_relative=False,
|
||||
unsigned=False, allow_root=False, rebuild_index=False):
|
||||
if spec_yaml:
|
||||
with open(spec_yaml, 'r') as fd:
|
||||
yaml_text = fd.read()
|
||||
tty.debug('createtarball read spec yaml:')
|
||||
tty.debug(yaml_text)
|
||||
s = Spec.from_yaml(yaml_text)
|
||||
if spec_file:
|
||||
with open(spec_file, 'r') as fd:
|
||||
specfile_contents = fd.read()
|
||||
tty.debug('createtarball read specfile contents:')
|
||||
tty.debug(specfile_contents)
|
||||
if spec_file.endswith('.json'):
|
||||
s = Spec.from_json(specfile_contents)
|
||||
else:
|
||||
s = Spec.from_yaml(specfile_contents)
|
||||
package = '/{0}'.format(s.dag_hash())
|
||||
matches = find_matching_specs(package, env=env)
|
||||
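The extension-based dispatch introduced above, pulled out as a standalone sketch (the helper name and path handling are illustrative, not part of the change):

from spack.spec import Spec

def read_spec_from_file(path):
    # JSON specfiles are detected by extension; anything else is read as YAML,
    # mirroring the branch added to _createtarball
    with open(path, 'r') as fd:
        contents = fd.read()
    if path.endswith('.json'):
        return Spec.from_json(contents)
    return Spec.from_yaml(contents)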
|
||||
@@ -350,7 +396,7 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
|
||||
else:
|
||||
tty.die("build cache file creation requires at least one" +
|
||||
" installed package spec, an active environment," +
|
||||
" or else a path to a yaml file containing a spec" +
|
||||
" or else a path to a json or yaml file containing a spec" +
|
||||
" to install")
|
||||
specs = set()
|
||||
|
||||
@@ -419,7 +465,7 @@ def createtarball(args):
|
||||
"""create a binary package from an existing install"""
|
||||
|
||||
# restrict matching to current environment if one is active
|
||||
env = ev.get_env(args, 'buildcache create')
|
||||
env = ev.active_environment()
|
||||
|
||||
output_location = None
|
||||
if args.directory:
|
||||
@@ -459,7 +505,7 @@ def createtarball(args):
|
||||
add_spec = ('package' in args.things_to_install)
|
||||
add_deps = ('dependencies' in args.things_to_install)
|
||||
|
||||
_createtarball(env, spec_yaml=args.spec_yaml, packages=args.specs,
|
||||
_createtarball(env, spec_file=args.spec_file, packages=args.specs,
|
||||
add_spec=add_spec, add_deps=add_deps,
|
||||
output_location=output_location, signing_key=args.key,
|
||||
force=args.force, make_relative=args.rel,
|
||||
@@ -494,6 +540,15 @@ def install_tarball(spec, args):
|
||||
else:
|
||||
tarball = bindist.download_tarball(spec)
|
||||
if tarball:
|
||||
if args.sha256:
|
||||
checker = spack.util.crypto.Checker(args.sha256)
|
||||
msg = ('cannot verify checksum for "{0}"'
|
||||
' [expected={1}]')
|
||||
msg = msg.format(tarball, args.sha256)
|
||||
if not checker.check(tarball):
|
||||
raise spack.binary_distribution.NoChecksumException(msg)
|
||||
tty.debug('Verified SHA256 checksum of the build cache')
|
||||
|
||||
tty.msg('Installing buildcache for spec %s' % spec.format())
|
||||
bindist.extract_tarball(spec, tarball, args.allow_root,
|
||||
args.unsigned, args.force)
|
||||
@@ -551,10 +606,10 @@ def check_binaries(args):
|
||||
its result, specifically, if the exit code is non-zero, then at least
|
||||
one of the indicated specs needs to be rebuilt.
|
||||
"""
|
||||
if args.spec or args.spec_yaml:
|
||||
if args.spec or args.spec_file:
|
||||
specs = [get_concrete_spec(args)]
|
||||
else:
|
||||
env = ev.get_env(args, 'buildcache', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='buildcache')
|
||||
env.concretize()
|
||||
specs = env.all_specs()
|
||||
|
||||
@@ -588,15 +643,16 @@ def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
|
||||
|
||||
files_to_fetch = [
|
||||
{
|
||||
'url': tarball_path_name,
|
||||
'url': [tarball_path_name],
|
||||
'path': local_tarball_path,
|
||||
'required': True,
|
||||
}, {
|
||||
'url': bindist.tarball_name(concrete_spec, '.spec.yaml'),
|
||||
'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
|
||||
bindist.tarball_name(concrete_spec, '.spec.yaml')],
|
||||
'path': local_dest,
|
||||
'required': True,
|
||||
}, {
|
||||
'url': bindist.tarball_name(concrete_spec, '.cdashid'),
|
||||
'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
|
||||
'path': local_dest,
|
||||
'required': require_cdashid,
|
||||
},
|
||||
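Each 'url' entry is now a list so the download step can try the newer .spec.json name before falling back to .spec.yaml. A simplified, self-contained sketch of that fallback (the helper and the use of urlretrieve are illustrative; the real code goes through Spack's web utilities):

import os
try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve           # Python 2

def fetch_first_available(urls, dest_dir):
    # try candidates in order; the first one that downloads wins
    for candidate in urls:
        target = os.path.join(dest_dir, os.path.basename(candidate))
        try:
            urlretrieve(candidate, target)
            return target
        except Exception:
            continue
    raise RuntimeError('unable to fetch any of: {0}'.format(', '.join(urls)))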
@@ -610,9 +666,9 @@ def get_tarball(args):
|
||||
command uses the process exit code to indicate its result, specifically,
|
||||
a non-zero exit code indicates that the command failed to download at
|
||||
least one of the required buildcache components. Normally, just the
|
||||
tarball and .spec.yaml files are required, but if the --require-cdashid
|
||||
tarball and .spec.json files are required, but if the --require-cdashid
|
||||
argument was provided, then a .cdashid file is also required."""
|
||||
if not args.spec and not args.spec_yaml:
|
||||
if not args.spec and not args.spec_file:
|
||||
tty.msg('No specs provided, exiting.')
|
||||
sys.exit(0)
|
||||
|
||||
@@ -629,7 +685,7 @@ def get_tarball(args):
|
||||
|
||||
def get_concrete_spec(args):
|
||||
spec_str = args.spec
|
||||
spec_yaml_path = args.spec_yaml
|
||||
spec_yaml_path = args.spec_file
|
||||
|
||||
if not spec_str and not spec_yaml_path:
|
||||
tty.msg('Must provide either spec string or path to ' +
|
||||
@@ -661,14 +717,14 @@ def get_buildcache_name(args):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def save_spec_yamls(args):
|
||||
def save_specfiles(args):
|
||||
"""Get full spec for dependencies, relative to root spec, and write them
|
||||
to files in the specified output directory. Uses exit code to signal
|
||||
success or failure. An exit code of zero means the command was likely
|
||||
successful. If any errors or exceptions are encountered, or if expected
|
||||
command-line arguments are not provided, then the exit code will be
|
||||
non-zero."""
|
||||
if not args.root_spec and not args.root_spec_yaml:
|
||||
if not args.root_spec and not args.root_specfile:
|
||||
tty.msg('No root spec provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
@@ -676,20 +732,20 @@ def save_spec_yamls(args):
|
||||
tty.msg('No dependent specs provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
if not args.yaml_dir:
|
||||
if not args.specfile_dir:
|
||||
tty.msg('No yaml directory provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
if args.root_spec_yaml:
|
||||
with open(args.root_spec_yaml) as fd:
|
||||
root_spec_as_yaml = fd.read()
|
||||
if args.root_specfile:
|
||||
with open(args.root_specfile) as fd:
|
||||
root_spec_as_json = fd.read()
|
||||
else:
|
||||
root_spec = Spec(args.root_spec)
|
||||
root_spec.concretize()
|
||||
root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
|
||||
|
||||
save_dependency_spec_yamls(
|
||||
root_spec_as_yaml, args.yaml_dir, args.specs.split())
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
|
||||
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
|
||||
save_dependency_specfiles(
|
||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
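A minimal sketch of the renamed helper (the spec name, output directory, and dependency list are made up; the final positional argument selects 'json' or 'yaml' exactly as in the hunk above):

import spack.hash_types as ht
from spack.spec import Spec, save_dependency_specfiles

root_spec = Spec('hdf5')   # any concretizable root spec
root_spec.concretize()
root_json = root_spec.to_json(hash=ht.build_hash)

# writes one specfile per listed dependency into the output directory
save_dependency_specfiles(root_json, '/tmp/specfiles', ['zlib'], 'json')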
@@ -698,10 +754,10 @@ def buildcache_copy(args):
|
||||
"""Copy a buildcache entry and all its files from one mirror, given as
|
||||
'--base-dir', to some other mirror, specified as '--destination-url'.
|
||||
The specific buildcache entry to be copied from one location to the
|
||||
other is identified using the '--spec-yaml' argument."""
|
||||
other is identified using the '--spec-file' argument."""
|
||||
# TODO: This sub-command should go away once #11117 is merged
|
||||
|
||||
if not args.spec_yaml:
|
||||
if not args.spec_file:
|
||||
tty.msg('No spec yaml provided, exiting.')
|
||||
sys.exit(1)
|
||||
|
||||
@@ -721,12 +777,12 @@ def buildcache_copy(args):
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
with open(args.spec_yaml, 'r') as fd:
|
||||
with open(args.spec_file, 'r') as fd:
|
||||
spec = Spec.from_yaml(fd.read())
|
||||
except Exception as e:
|
||||
tty.debug(e)
|
||||
tty.error('Unable to concretize spec from yaml {0}'.format(
|
||||
args.spec_yaml))
|
||||
args.spec_file))
|
||||
sys.exit(1)
|
||||
|
||||
dest_root_path = dest_url
|
||||
@@ -741,10 +797,15 @@ def buildcache_copy(args):
|
||||
tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
|
||||
|
||||
specfile_rel_path = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.json'))
|
||||
specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
|
||||
specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
|
||||
|
||||
specfile_rel_path_yaml = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
|
||||
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
|
||||
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
|
||||
|
||||
cdashidfile_rel_path = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
|
||||
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
|
||||
@@ -760,12 +821,134 @@ def buildcache_copy(args):
|
||||
tty.msg('Copying {0}'.format(specfile_rel_path))
|
||||
shutil.copyfile(specfile_src_path, specfile_dest_path)
|
||||
|
||||
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
|
||||
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
|
||||
|
||||
# Copy the cdashid file (if exists) to the destination mirror
|
||||
if os.path.exists(cdashid_src_path):
|
||||
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
|
||||
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
|
||||
|
||||
|
||||
def buildcache_sync(args):
|
||||
""" Syncs binaries (and associated metadata) from one mirror to another.
|
||||
Requires an active environment in order to know which specs to sync.
|
||||
|
||||
Args:
|
||||
src (str): Source mirror URL
|
||||
dest (str): Destination mirror URL
|
||||
"""
|
||||
# Figure out the source mirror
|
||||
source_location = None
|
||||
if args.src_directory:
|
||||
source_location = args.src_directory
|
||||
scheme = url_util.parse(source_location, scheme='<missing>').scheme
|
||||
if scheme != '<missing>':
|
||||
raise ValueError(
|
||||
'"--src-directory" expected a local path; got a URL, instead')
|
||||
# Ensure that the mirror lookup does not mistake this for named mirror
|
||||
source_location = 'file://' + source_location
|
||||
elif args.src_mirror_name:
|
||||
source_location = args.src_mirror_name
|
||||
result = spack.mirror.MirrorCollection().lookup(source_location)
|
||||
if result.name == "<unnamed>":
|
||||
raise ValueError(
|
||||
'no configured mirror named "{name}"'.format(
|
||||
name=source_location))
|
||||
elif args.src_mirror_url:
|
||||
source_location = args.src_mirror_url
|
||||
scheme = url_util.parse(source_location, scheme='<missing>').scheme
|
||||
if scheme == '<missing>':
|
||||
raise ValueError(
|
||||
'"{url}" is not a valid URL'.format(url=source_location))
|
||||
|
||||
src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
|
||||
src_mirror_url = url_util.format(src_mirror.fetch_url)
|
||||
|
||||
# Figure out the destination mirror
|
||||
dest_location = None
|
||||
if args.dest_directory:
|
||||
dest_location = args.dest_directory
|
||||
scheme = url_util.parse(dest_location, scheme='<missing>').scheme
|
||||
if scheme != '<missing>':
|
||||
raise ValueError(
|
||||
'"--dest-directory" expected a local path; got a URL, instead')
|
||||
# Ensure that the mirror lookup does not mistake this for named mirror
|
||||
dest_location = 'file://' + dest_location
|
||||
elif args.dest_mirror_name:
|
||||
dest_location = args.dest_mirror_name
|
||||
result = spack.mirror.MirrorCollection().lookup(dest_location)
|
||||
if result.name == "<unnamed>":
|
||||
raise ValueError(
|
||||
'no configured mirror named "{name}"'.format(
|
||||
name=dest_location))
|
||||
elif args.dest_mirror_url:
|
||||
dest_location = args.dest_mirror_url
|
||||
scheme = url_util.parse(dest_location, scheme='<missing>').scheme
|
||||
if scheme == '<missing>':
|
||||
raise ValueError(
|
||||
'"{url}" is not a valid URL'.format(url=dest_location))
|
||||
|
||||
dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
|
||||
dest_mirror_url = url_util.format(dest_mirror.fetch_url)
|
||||
|
||||
# Get the active environment
|
||||
env = spack.cmd.require_active_env(cmd_name='buildcache sync')
|
||||
|
||||
tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
|
||||
src_mirror_url, dest_mirror_url))
|
||||
|
||||
build_cache_dir = bindist.build_cache_relative_path()
|
||||
buildcache_rel_paths = []
|
||||
|
||||
tty.debug('Syncing the following specs:')
|
||||
for s in env.all_specs():
|
||||
tty.debug(' {0}{1}: {2}'.format(
|
||||
'* ' if s in env.roots() else ' ', s.name, s.dag_hash()))
|
||||
|
||||
buildcache_rel_paths.extend([
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_path_name(s, '.spack')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
|
||||
])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
try:
|
||||
for rel_path in buildcache_rel_paths:
|
||||
src_url = url_util.join(src_mirror_url, rel_path)
|
||||
local_path = os.path.join(tmpdir, rel_path)
|
||||
dest_url = url_util.join(dest_mirror_url, rel_path)
|
||||
|
||||
tty.debug('Copying {0} to {1} via {2}'.format(
|
||||
src_url, dest_url, local_path))
|
||||
|
||||
stage = Stage(src_url,
|
||||
name="temporary_file",
|
||||
path=os.path.dirname(local_path),
|
||||
keep=True)
|
||||
|
||||
try:
|
||||
stage.create()
|
||||
stage.fetch()
|
||||
web_util.push_to_url(
|
||||
local_path,
|
||||
dest_url,
|
||||
keep_original=True)
|
||||
except fs.FetchError as e:
|
||||
tty.debug('spack buildcache unable to sync {0}'.format(rel_path))
|
||||
tty.debug(e)
|
||||
finally:
|
||||
stage.destroy()
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
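Illustrative only (the mirror URL and directory are placeholders, and an environment must already be active, as the function requires):

import spack.main

buildcache = spack.main.SpackCommand('buildcache')

buildcache('sync',
           '--src-mirror-url', 'https://cache.example.com/mirror',
           '--dest-directory', '/tmp/local-mirror')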
def update_index(mirror_url, update_keys=False):
|
||||
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
|
||||
outdir = url_util.format(mirror.push_url)
|
||||
|
@@ -63,6 +63,16 @@ def checksum(parser, args):
|
||||
if not url_dict:
|
||||
tty.die("Could not find any versions for {0}".format(pkg.name))
|
||||
|
||||
# And ensure the specified version URLs take precedence, if available
|
||||
try:
|
||||
explicit_dict = {}
|
||||
for v in pkg.versions:
|
||||
if not v.isdevelop():
|
||||
explicit_dict[v] = pkg.url_for_version(v)
|
||||
url_dict.update(explicit_dict)
|
||||
except spack.package.NoURLError:
|
||||
pass
|
||||
|
||||
version_lines = spack.stage.get_checksums_for_versions(
|
||||
url_dict, pkg.name, keep_stage=args.keep_stage,
|
||||
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
|
||||
|
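Dictionary update order is what gives the explicitly declared version URLs precedence; a tiny standalone illustration of that mechanism (URLs are made up):

guessed = {'1.2.0': 'https://example.com/guessed-1.2.0.tar.gz'}
declared = {'1.2.0': 'https://example.com/declared-1.2.0.tar.gz'}

urls = dict(guessed)
urls.update(declared)   # declared entries overwrite guessed ones for the same version
assert urls['1.2.0'].endswith('declared-1.2.0.tar.gz')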
@@ -22,6 +22,7 @@
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
|
||||
@@ -30,6 +31,7 @@
|
||||
level = "long"
|
||||
|
||||
CI_REBUILD_INSTALL_BASE_ARGS = ['spack', '-d', '-v']
|
||||
INSTALL_FAIL_CODE = 1
|
||||
|
||||
|
||||
def get_env_var(variable_name):
|
||||
@@ -76,8 +78,8 @@ def setup_parser(subparser):
|
||||
default=False, help="""Spack always checks specs against configured
|
||||
binary mirrors when generating the pipeline, regardless of whether or not
|
||||
DAG pruning is enabled. This flag controls whether it might attempt to
|
||||
fetch remote spec.yaml files directly (ensuring no spec is rebuilt if it is
|
||||
present on the mirror), or whether it should reduce pipeline generation time
|
||||
fetch remote spec files directly (ensuring no spec is rebuilt if it
|
||||
is present on the mirror), or whether it should reduce pipeline generation time
|
||||
by assuming all remote buildcache indices are up to date and only use those
|
||||
to determine whether a given spec is up to date on mirrors. In the latter
|
||||
case, specs might be needlessly rebuilt if remote buildcache indices are out
|
||||
@@ -116,7 +118,7 @@ def ci_generate(args):
|
||||
for creating a build group for the generated workload and registering
|
||||
all generated jobs under that build group. If this environment
|
||||
variable is not set, no build group will be created on CDash."""
|
||||
env = ev.get_env(args, 'ci generate', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='ci generate')
|
||||
|
||||
output_file = args.output_file
|
||||
copy_yaml_to = args.copy_to
|
||||
@@ -150,7 +152,7 @@ def ci_generate(args):
|
||||
def ci_reindex(args):
|
||||
"""Rebuild the buildcache index associated with the mirror in the
|
||||
active, gitlab-enabled environment. """
|
||||
env = ev.get_env(args, 'ci rebuild-index', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='ci rebuild-index')
|
||||
yaml_root = ev.config_dict(env.yaml)
|
||||
|
||||
if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
|
||||
@@ -167,7 +169,7 @@ def ci_rebuild(args):
|
||||
"""Check a single spec against the remote mirror, and rebuild it from
|
||||
source if the mirror does not contain the full hash match of the spec
|
||||
as computed locally. """
|
||||
env = ev.get_env(args, 'ci rebuild', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='ci rebuild')
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
# to do.
|
||||
@@ -491,7 +493,7 @@ def ci_rebuild(args):
|
||||
# If a spec fails to build in a spack develop pipeline, we add it to a
|
||||
# list of known broken full hashes. This allows spack PR pipelines to
|
||||
# avoid wasting compute cycles attempting to build those hashes.
|
||||
if install_exit_code == 1 and spack_is_develop_pipeline:
|
||||
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
|
||||
tty.debug('Install failed on develop')
|
||||
if 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
@@ -502,9 +504,17 @@ def ci_rebuild(args):
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
empty_file_path = os.path.join(tmpdir, 'empty.txt')
|
||||
|
||||
broken_spec_details = {
|
||||
'broken-spec': {
|
||||
'job-url': get_env_var('CI_JOB_URL'),
|
||||
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
|
||||
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
|
||||
}
|
||||
}
|
||||
|
||||
try:
|
||||
with open(empty_file_path, 'w') as efd:
|
||||
efd.write('')
|
||||
efd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
empty_file_path,
|
||||
broken_spec_path,
|
||||
@@ -566,6 +576,26 @@ def ci_rebuild(args):
|
||||
cdash_build_id, pipeline_mirror_url))
|
||||
spack_ci.write_cdashid_to_mirror(
|
||||
cdash_build_id, job_spec, pipeline_mirror_url)
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
# on the broken-specs list. If so, remove it.
|
||||
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
just_built_hash = job_spec.full_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
|
||||
if web_util.url_exists(broken_spec_path):
|
||||
tty.msg('Removing {0} from the list of broken specs'.format(
|
||||
broken_spec_path))
|
||||
try:
|
||||
web_util.remove_url(broken_spec_path)
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = 'Error removing {0} from broken specs list: {1}'.format(
|
||||
broken_spec_path, err)
|
||||
tty.warn(msg)
|
||||
|
||||
else:
|
||||
tty.debug('spack install exited non-zero, will not create buildcache')
|
||||
|
||||
|
@@ -9,6 +9,7 @@
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.bootstrap
|
||||
import spack.caches
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.cmd.test
|
||||
@@ -102,7 +103,7 @@ def clean(parser, args):
|
||||
|
||||
if args.bootstrap:
|
||||
msg = 'Removing software in "{0}"'
|
||||
tty.msg(msg.format(spack.paths.user_bootstrap_store))
|
||||
with spack.store.use_store(spack.paths.user_bootstrap_store):
|
||||
tty.msg(msg.format(spack.bootstrap.store_path()))
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
uninstall = spack.main.SpackCommand('uninstall')
|
||||
uninstall('-a', '-y')
|
||||
|
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import spack.cmd
|
||||
import spack.environment as ev
|
||||
|
||||
description = 'concretize an environment and write a lockfile'
|
||||
@@ -23,7 +24,7 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def concretize(parser, args):
|
||||
env = ev.get_env(args, 'concretize', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='concretize')
|
||||
|
||||
if args.test == 'all':
|
||||
tests = True
|
||||
|
@@ -118,7 +118,7 @@ def _get_scope_and_section(args):
|
||||
|
||||
# w/no args and an active environment, point to env manifest
|
||||
if not section:
|
||||
env = ev.get_env(args, 'config edit')
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
scope = env.env_file_config_scope_name()
|
||||
|
||||
|
@@ -636,7 +636,7 @@ def get_name(args):
|
||||
provided, extract the name from that. Otherwise, use a default.
|
||||
|
||||
Args:
|
||||
args (param argparse.Namespace): The arguments given to
|
||||
args (argparse.Namespace): The arguments given to
|
||||
``spack create``
|
||||
|
||||
Returns:
|
||||
@@ -709,8 +709,7 @@ def get_versions(args, name):
|
||||
name (str): The name of the package
|
||||
|
||||
Returns:
|
||||
str and BuildSystemGuesser: Versions and hashes, and a
|
||||
BuildSystemGuesser object
|
||||
tuple: versions and hashes, and a BuildSystemGuesser object
|
||||
"""
|
||||
|
||||
# Default version with hash
|
||||
@@ -794,7 +793,8 @@ def get_repository(args, name):
|
||||
name (str): The name of the package to create
|
||||
|
||||
Returns:
|
||||
Repo: A Repo object capable of determining the path to the package file
|
||||
spack.repo.Repo: A Repo object capable of determining the path to the
|
||||
package file
|
||||
"""
|
||||
spec = Spec(name)
|
||||
# Figure out namespace for spec
|
||||
|
@@ -36,7 +36,7 @@ def deactivate(parser, args):
|
||||
if len(specs) != 1:
|
||||
tty.die("deactivate requires one spec. %d given." % len(specs))
|
||||
|
||||
env = ev.get_env(args, 'deactivate')
|
||||
env = ev.active_environment()
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
pkg = spec.package
|
||||
|
||||
|
@@ -74,6 +74,7 @@ def create_db_tarball(args):
|
||||
wd = os.path.dirname(str(spack.store.root))
|
||||
with working_dir(wd):
|
||||
files = [spack.store.db._index_path]
|
||||
files += glob('%s/*/*/*/.spack/spec.json' % base)
|
||||
files += glob('%s/*/*/*/.spack/spec.yaml' % base)
|
||||
files = [os.path.relpath(f) for f in files]
|
||||
|
||||
|
@@ -41,7 +41,7 @@ def dependencies(parser, args):
|
||||
tty.die("spack dependencies takes only one spec.")
|
||||
|
||||
if args.installed:
|
||||
env = ev.get_env(args, 'dependencies')
|
||||
env = ev.active_environment()
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
|
||||
format_string = '{name}{@version}{%compiler}{/hash:7}'
|
||||
|
@@ -59,7 +59,7 @@ def get_dependents(pkg_name, ideps, transitive=False, dependents=None):
|
||||
Args:
|
||||
pkg_name (str): name of the package whose dependents should be returned
|
||||
ideps (dict): dictionary of dependents, from inverted_dependencies()
|
||||
transitive (bool, optional): return transitive dependents when True
|
||||
transitive (bool or None): return transitive dependents when True
|
||||
"""
|
||||
if dependents is None:
|
||||
dependents = set()
|
||||
@@ -82,7 +82,7 @@ def dependents(parser, args):
|
||||
tty.die("spack dependents takes only one spec.")
|
||||
|
||||
if args.installed:
|
||||
env = ev.get_env(args, 'dependents')
|
||||
env = ev.active_environment()
|
||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||
|
||||
format_string = '{name}{@version}{%compiler}{/hash:7}'
|
||||
|
@@ -71,7 +71,7 @@ def setup_parser(sp):
|
||||
|
||||
def deprecate(parser, args):
|
||||
"""Deprecate one spec in favor of another"""
|
||||
env = ev.get_env(args, 'deprecate')
|
||||
env = ev.active_environment()
|
||||
specs = spack.cmd.parse_specs(args.specs)
|
||||
|
||||
if len(specs) != 2:
|
||||
|
@@ -9,7 +9,6 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
from spack.error import SpackError
|
||||
|
||||
description = "add a spec to an environment's dev-build information"
|
||||
@@ -37,7 +36,7 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def develop(parser, args):
|
||||
env = ev.get_env(args, 'develop', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='develop')
|
||||
|
||||
if not args.spec:
|
||||
if args.clone is False:
|
||||
|
197
lib/spack/spack/cmd/diff.py
Normal file
@@ -0,0 +1,197 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.color import cprint, get_color_when
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.solver.asp as asp
|
||||
import spack.util.environment
|
||||
import spack.util.spack_json as sjson
|
||||
|
||||
description = "compare two specs"
|
||||
section = "basic"
|
||||
level = "long"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(
|
||||
subparser, ['specs'])
|
||||
|
||||
subparser.add_argument(
|
||||
'--json',
|
||||
action='store_true',
|
||||
default=False,
|
||||
dest='dump_json',
|
||||
help="Dump json output instead of pretty printing."
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--first',
|
||||
action='store_true',
|
||||
default=False,
|
||||
dest='load_first',
|
||||
help="load the first match if multiple packages match the spec"
|
||||
)
|
||||
subparser.add_argument(
|
||||
'-a', '--attribute',
|
||||
action='append',
|
||||
help="select the attributes to show (defaults to all)"
|
||||
)
|
||||
|
||||
|
||||
def compare_specs(a, b, to_string=False, color=None):
|
||||
"""
|
||||
Generate a comparison, including diffs (for each side) and an intersection.
|
||||
|
||||
We can either print the result to the console, or parse
|
||||
into a json object for the user to save. We return an object that shows
|
||||
the differences, intersection, and names for a pair of specs a and b.
|
||||
|
||||
Arguments:
|
||||
a (spack.spec.Spec): the first spec to compare
|
||||
b (spack.spec.Spec): the second spec to compare
|
||||
a_name (str): the name of spec a
|
||||
b_name (str): the name of spec b
|
||||
to_string (bool): return an object that can be json dumped
|
||||
color (bool): whether to format the names for the console
|
||||
"""
|
||||
if color is None:
|
||||
color = get_color_when()
|
||||
|
||||
# Prepare a solver setup to parse differences
|
||||
setup = asp.SpackSolverSetup()
|
||||
|
||||
a_facts = set(t for t in setup.spec_clauses(a, body=True))
|
||||
b_facts = set(t for t in setup.spec_clauses(b, body=True))
|
||||
|
||||
# We want to present them to the user as simple key: values
|
||||
intersect = sorted(a_facts.intersection(b_facts))
|
||||
spec1_not_spec2 = sorted(a_facts.difference(b_facts))
|
||||
spec2_not_spec1 = sorted(b_facts.difference(a_facts))
|
||||
|
||||
# Format the spec names to be colored
|
||||
fmt = "{name}{@version}{/hash}"
|
||||
a_name = a.format(fmt, color=color)
|
||||
b_name = b.format(fmt, color=color)
|
||||
|
||||
# We want to show what is the same, and then difference for each
|
||||
return {
|
||||
"intersect": flatten(intersect) if to_string else intersect,
|
||||
"a_not_b": flatten(spec1_not_spec2) if to_string else spec1_not_spec2,
|
||||
"b_not_a": flatten(spec2_not_spec1) if to_string else spec2_not_spec1,
|
||||
"a_name": a_name,
|
||||
"b_name": b_name,
|
||||
}
|
||||
|
||||
|
||||
def flatten(functions):
|
||||
"""
|
||||
Given a list of ASP functions, convert into a list of key: value tuples.
|
||||
|
||||
We are squashing whatever is after the first index into one string for
|
||||
easier parsing in the interface
|
||||
"""
|
||||
updated = []
|
||||
for fun in functions:
|
||||
updated.append([fun.name, " ".join(str(a) for a in fun.args)])
|
||||
return updated
|
||||
|
||||
|
||||
def print_difference(c, attributes="all", out=None):
|
||||
"""
|
||||
Print the difference.
|
||||
|
||||
Given a diffset for A and a diffset for B, print red/green diffs to show
|
||||
the differences.
|
||||
"""
|
||||
# Default to standard out unless another stream is provided
|
||||
out = out or sys.stdout
|
||||
|
||||
A = c['b_not_a']
|
||||
B = c['a_not_b']
|
||||
|
||||
cprint("@R{--- %s}" % c["a_name"]) # bright red
|
||||
cprint("@G{+++ %s}" % c["b_name"]) # bright green
|
||||
|
||||
# Cut out early if we don't have any differences!
|
||||
if not A and not B:
|
||||
print("No differences\n")
|
||||
return
|
||||
|
||||
def group_by_type(diffset):
|
||||
grouped = {}
|
||||
for entry in diffset:
|
||||
if entry[0] not in grouped:
|
||||
grouped[entry[0]] = []
|
||||
grouped[entry[0]].append(entry[1])
|
||||
|
||||
# Sort by second value to make comparison slightly closer
|
||||
for key, values in grouped.items():
|
||||
values.sort()
|
||||
return grouped
|
||||
|
||||
A = group_by_type(A)
|
||||
B = group_by_type(B)
|
||||
|
||||
# print a directionally relevant diff
|
||||
keys = list(A) + list(B)
|
||||
|
||||
category = None
|
||||
for key in keys:
|
||||
if "all" not in attributes and key not in attributes:
|
||||
continue
|
||||
|
||||
# Write the attribute, B is subtraction A is addition
|
||||
subtraction = [] if key not in B else B[key]
|
||||
addition = [] if key not in A else A[key]
|
||||
|
||||
# Bail out early if we don't have any entries
|
||||
if not subtraction and not addition:
|
||||
continue
|
||||
|
||||
# If we have a new category, create a new section
|
||||
if category != key:
|
||||
category = key
|
||||
|
||||
# print category in bold, colorized
|
||||
cprint("@*b{@@ %s @@}" % category) # bold blue
|
||||
|
||||
# Print subtractions first
|
||||
while subtraction:
|
||||
cprint("@R{- %s}" % subtraction.pop(0)) # bright red
|
||||
if addition:
|
||||
cprint("@G{+ %s}" % addition.pop(0)) # bright green
|
||||
|
||||
# Any additions left?
|
||||
while addition:
|
||||
cprint("@G{+ %s}" % addition.pop(0))
|
||||
|
||||
|
||||
def diff(parser, args):
|
||||
env = ev.active_environment()
|
||||
|
||||
if len(args.specs) != 2:
|
||||
tty.die("You must provide two specs to diff.")
|
||||
|
||||
specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
|
||||
for spec in spack.cmd.parse_specs(args.specs)]
|
||||
|
||||
# Calculate the comparison (c)
|
||||
color = False if args.dump_json else get_color_when()
|
||||
c = compare_specs(specs[0], specs[1], to_string=True, color=color)
|
||||
|
||||
# Default to all attributes
|
||||
attributes = args.attribute or ["all"]
|
||||
|
||||
if args.dump_json:
|
||||
print(sjson.dump(c))
|
||||
else:
|
||||
tty.warn("This interface is subject to change.\n")
|
||||
print_difference(c, attributes)
|
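A rough usage sketch of the new command (the spec names are placeholders and must resolve to installed specs, since diff disambiguates them against the database or the active environment):

import spack.main

diff = spack.main.SpackCommand('diff')

print(diff('zlib@1.2.8', 'zlib@1.2.11'))            # colorized, unified-diff style report
print(diff('--json', 'zlib@1.2.8', 'zlib@1.2.11'))  # machine-readable comparison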
@@ -6,7 +6,6 @@
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
@@ -89,6 +88,11 @@ def env_activate(args):
|
||||
)
|
||||
return 1
|
||||
|
||||
# Error out when -e, -E, -D flags are given, because they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die('Calling spack env activate with --env, --env-dir and --no-env '
|
||||
'is ambiguous')
|
||||
|
||||
if ev.exists(env) and not args.dir:
|
||||
spack_env = ev.root(env)
|
||||
short_name = env
|
||||
@@ -103,12 +107,11 @@ def env_activate(args):
|
||||
tty.die("No such environment: '%s'" % env)
|
||||
|
||||
if spack_env == os.environ.get('SPACK_ENV'):
|
||||
tty.die("Environment %s is already active" % args.activate_env)
|
||||
tty.debug("Environment %s is already active" % args.activate_env)
|
||||
return
|
||||
|
||||
active_env = ev.get_env(namedtuple('args', ['env'])(env),
|
||||
'activate')
|
||||
cmds = ev.activate(
|
||||
active_env, add_view=args.with_view, shell=args.shell,
|
||||
ev.Environment(spack_env), add_view=args.with_view, shell=args.shell,
|
||||
prompt=env_prompt if args.prompt else None
|
||||
)
|
||||
sys.stdout.write(cmds)
|
||||
@@ -139,6 +142,11 @@ def env_deactivate(args):
|
||||
)
|
||||
return 1
|
||||
|
||||
# Error out when -e, -E, -D flags are given, because they are ambiguous.
|
||||
if args.env or args.no_env or args.env_dir:
|
||||
tty.die('Calling spack env deactivate with --env, --env-dir and --no-env '
|
||||
'is ambiguous')
|
||||
|
||||
if 'SPACK_ENV' not in os.environ:
|
||||
tty.die('No environment is currently active.')
|
||||
|
||||
@@ -314,7 +322,7 @@ def env_view_setup_parser(subparser):
|
||||
|
||||
|
||||
def env_view(args):
|
||||
env = ev.get_env(args, 'env view')
|
||||
env = ev.active_environment()
|
||||
|
||||
if env:
|
||||
if args.action == ViewAction.regenerate:
|
||||
@@ -341,7 +349,7 @@ def env_status_setup_parser(subparser):
|
||||
|
||||
|
||||
def env_status(args):
|
||||
env = ev.get_env(args, 'env status')
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
if env.path == os.getcwd():
|
||||
tty.msg('Using %s in current directory: %s'
|
||||
@@ -372,7 +380,7 @@ def env_loads_setup_parser(subparser):
|
||||
|
||||
|
||||
def env_loads(args):
|
||||
env = ev.get_env(args, 'env loads', required=True)
|
||||
env = spack.cmd.require_active_env(cmd_name='env loads')
|
||||
|
||||
# Set the module types that have been selected
|
||||
module_type = args.module_type
|
||||
|
@@ -67,7 +67,7 @@ def extensions(parser, args):
|
||||
if not spec[0].package.extendable:
|
||||
tty.die("%s is not an extendable package." % spec[0].name)
|
||||
|
||||
env = ev.get_env(args, 'extensions')
|
||||
env = ev.active_environment()
|
||||
spec = cmd.disambiguate_spec(spec[0], env)
|
||||
|
||||
if not spec.package.extendable:
|
||||
|
@@ -47,7 +47,7 @@ def fetch(parser, args):
|
||||
# fetch all uninstalled specs from it otherwise fetch all.
|
||||
# If we are also not in an environment, complain to the
|
||||
# user that we don't know what to do.
|
||||
env = ev.get_env(args, "fetch")
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
if args.missing:
|
||||
specs = env.uninstalled_specs()
|
||||
|
@@ -13,6 +13,7 @@
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as color
|
||||
|
||||
import spack.bootstrap
|
||||
import spack.cmd as cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
@@ -204,23 +205,24 @@ def display_env(env, args, decorator):
|
||||
|
||||
|
||||
def find(parser, args):
|
||||
q_args = query_arguments(args)
|
||||
# Query the current store or the internal bootstrap store if required
|
||||
if args.bootstrap:
|
||||
msg = 'Showing internal bootstrap store at "{0}"'
|
||||
tty.msg(msg.format(spack.paths.user_bootstrap_store))
|
||||
with spack.store.use_store(spack.paths.user_bootstrap_store):
|
||||
results = args.specs(**q_args)
|
||||
else:
|
||||
results = args.specs(**q_args)
|
||||
bootstrap_store_path = spack.bootstrap.store_path()
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
msg = 'Showing internal bootstrap store at "{0}"'
|
||||
tty.msg(msg.format(bootstrap_store_path))
|
||||
_find(parser, args)
|
||||
return
|
||||
_find(parser, args)
|
||||
|
||||
|
||||
def _find(parser, args):
|
||||
q_args = query_arguments(args)
|
||||
results = args.specs(**q_args)
|
||||
|
||||
env = ev.active_environment()
|
||||
decorator = lambda s, f: f
|
||||
added = set()
|
||||
removed = set()
|
||||
|
||||
env = ev.get_env(args, 'find')
|
||||
if env:
|
||||
decorator, added, roots, removed = setup_env(env)
|
||||
decorator, _, roots, _ = setup_env(env)
|
||||
|
||||
# use groups by default except with format.
|
||||
if args.groups is None:
|
||||
@@ -231,7 +233,7 @@ def find(parser, args):
|
||||
msg = "No package matches the query: {0}"
|
||||
msg = msg.format(' '.join(args.constraint))
|
||||
tty.msg(msg)
|
||||
return 1
|
||||
raise SystemExit(1)
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
|
@@ -7,7 +7,7 @@

 import spack.cmd.common.arguments
 import spack.cmd.uninstall
-import spack.environment
+import spack.environment as ev
 import spack.store

 description = "remove specs that are now no longer needed"

@@ -24,7 +24,7 @@ def gc(parser, args):

     # Restrict garbage collection to the active environment
     # speculating over roots that are yet to be installed
-    env = spack.environment.get_env(args=None, cmd_name='gc')
+    env = ev.active_environment()
     if env:
         msg = 'Restricting the garbage collection to the "{0}" environment'
         tty.msg(msg.format(env.name))
@@ -10,6 +10,7 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.config
+import spack.environment as ev
 import spack.store
 from spack.graph import graph_ascii, graph_dot


@@ -35,7 +36,7 @@ def setup_parser(subparser):

     subparser.add_argument(
         '-i', '--installed', action='store_true',
-        help="graph all installed specs in dot format (implies --dot)")
+        help="graph installed specs, or specs in the active env (implies --dot)")

     arguments.add_common_arguments(subparser, ['deptype', 'specs'])

@@ -45,7 +46,12 @@ def graph(parser, args):
         if args.specs:
             tty.die("Can't specify specs with --installed")
         args.dot = True
-        specs = spack.store.db.query()
+
+        env = ev.active_environment()
+        if env:
+            specs = env.all_specs()
+        else:
+            specs = spack.store.db.query()

     else:
         specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)
@@ -198,13 +198,13 @@ def install_specs(cli_args, kwargs, specs):
     """Do the actual installation.

     Args:
-        cli_args (Namespace): argparse namespace with command arguments
+        cli_args (argparse.Namespace): argparse namespace with command arguments
         kwargs (dict): keyword arguments
-        specs (list of tuples): list of (abstract, concrete) spec tuples
+        specs (list): list of (abstract, concrete) spec tuples
     """

     # handle active environment, if any
-    env = ev.get_env(cli_args, 'install')
+    env = ev.active_environment()

     try:
         if env:

@@ -219,7 +219,7 @@ def install_specs(cli_args, kwargs, specs):

                 # If there is any ambiguity in the above call to matching_spec
                 # (i.e. if more than one spec in the environment matches), then
-                # SpackEnvironmentError is rasied, with a message listing the
+                # SpackEnvironmentError is raised, with a message listing the
                 # the matches. Getting to this point means there were either
                 # no matches or exactly one match.

@@ -243,7 +243,7 @@ def install_specs(cli_args, kwargs, specs):

                 if m_spec in env.roots() or cli_args.no_add:
                     # either the single match is a root spec (and --no-add is
-                    # the default for roots) or --no-add was stated explictly
+                    # the default for roots) or --no-add was stated explicitly
                     tty.debug('just install {0}'.format(m_spec.name))
                     specs_to_install.append(m_spec)
                 else:

@@ -324,10 +324,14 @@ def get_tests(specs):
         else:
             return False

+    # Parse cli arguments and construct a dictionary
+    # that will be passed to the package installer
+    update_kwargs_from_args(args, kwargs)
+
     if not args.spec and not args.specfiles:
         # if there are no args but an active environment
         # then install the packages from it.
-        env = ev.get_env(args, 'install')
+        env = ev.active_environment()
         if env:
             tests = get_tests(env.user_specs)
             kwargs['tests'] = tests

@@ -352,7 +356,7 @@ def get_tests(specs):

             tty.msg("Installing environment {0}".format(env.name))
             with reporter('build'):
-                env.install_all(args, **kwargs)
+                env.install_all(**kwargs)

             tty.debug("Regenerating environment views for {0}"
                       .format(env.name))

@@ -381,10 +385,6 @@ def get_tests(specs):
     if args.deprecated:
         spack.config.set('config:deprecated', True, scope='command_line')

-    # Parse cli arguments and construct a dictionary
-    # that will be passed to the package installer
-    update_kwargs_from_args(args, kwargs)
-
     # 1. Abstract specs from cli
     abstract_specs = spack.cmd.parse_specs(args.spec)
     tests = get_tests(abstract_specs)

@@ -401,7 +401,10 @@ def get_tests(specs):
     # 2. Concrete specs from yaml files
     for file in args.specfiles:
         with open(file, 'r') as f:
-            s = spack.spec.Spec.from_yaml(f)
+            if file.endswith('yaml') or file.endswith('yml'):
+                s = spack.spec.Spec.from_yaml(f)
+            else:
+                s = spack.spec.Spec.from_json(f)

         concretized = s.concretized()
         if concretized.dag_hash() != s.dag_hash():
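The last install hunk above branches on the spec file extension. A short sketch of the same logic as a standalone helper, assuming nothing beyond the Spec.from_yaml / Spec.from_json calls shown in the hunk; the helper name is hypothetical:

import spack.spec


def read_specfile(path):
    # YAML for .yaml/.yml files, JSON for everything else, as in the hunk above
    with open(path, 'r') as f:
        if path.endswith('yaml') or path.endswith('yml'):
            return spack.spec.Spec.from_yaml(f)
        return spack.spec.Spec.from_json(f)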
@@ -175,7 +175,8 @@ def wrong_spdx_identifier(line, path):
         if error:
             return error

-    print('{0}: the license does not match the expected format'.format(path))
+    print('{0}: the license header at the top of the file does not match the \
+expected format'.format(path))
     return GENERAL_MISMATCH


@@ -55,7 +55,7 @@ def setup_parser(subparser):


 def load(parser, args):
-    env = ev.get_env(args, 'load')
+    env = ev.active_environment()
     specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
              for spec in spack.cmd.parse_specs(args.specs)]

@@ -11,7 +11,6 @@

 import spack.cmd
 import spack.cmd.common.arguments as arguments
-import spack.environment
 import spack.environment as ev
 import spack.paths
 import spack.repo

@@ -73,7 +72,7 @@ def location(parser, args):
         return

     if args.location_env:
-        path = spack.environment.root(args.location_env)
+        path = ev.root(args.location_env)
         if not os.path.isdir(path):
             tty.die("no such environment: '%s'" % args.location_env)
         print(path)

@@ -97,7 +96,7 @@ def location(parser, args):

     # install_dir command matches against installed specs.
     if args.install_dir:
-        env = ev.get_env(args, 'location')
+        env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)
         print(spec.prefix)
         return
@@ -253,7 +253,7 @@ def _determine_specs_to_mirror(args):
                     "To mirror all packages, use the '--all' option"
                     " (this will require significant time and space).")

-        env = ev.get_env(args, 'mirror')
+        env = ev.active_environment()
         if env:
             env_specs = env.all_specs()
         else:
@@ -3,11 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import argparse
 from typing import Callable, Dict  # novm

-import llnl.util.tty as tty
-
 import spack.cmd.modules.lmod
 import spack.cmd.modules.tcl

@@ -18,49 +15,12 @@
 _subcommands = {}  # type: Dict[str, Callable]

-_deprecated_commands = ('refresh', 'find', 'rm', 'loads')
-

 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
     spack.cmd.modules.lmod.add_command(sp, _subcommands)
     spack.cmd.modules.tcl.add_command(sp, _subcommands)

-    for name in _deprecated_commands:
-        add_deprecated_command(sp, name)
-
-
-def add_deprecated_command(subparser, name):
-    parser = subparser.add_parser(name)
-    parser.add_argument(
-        '-m', '--module-type', help=argparse.SUPPRESS,
-        choices=spack.modules.module_types.keys(), action='append'
-    )
-
-
-def handle_deprecated_command(args, unknown_args):
-    command = args.module_command
-    unknown = ' '.join(unknown_args)
-
-    module_types = args.module_type or ['tcl']
-
-    msg = '`spack module {0} {1}` has moved. Use these commands instead:\n'
-    msg = msg.format(command, ' '.join('-m ' + x for x in module_types))
-    for x in module_types:
-        msg += '\n\t$ spack module {0} {1} {2}'.format(x, command, unknown)
-    msg += '\n'
-    tty.die(msg)
-
-
-def module(parser, args, unknown_args):
-
-    # Here we permit unknown arguments to intercept deprecated calls
-    if args.module_command in _deprecated_commands:
-        handle_deprecated_command(args, unknown_args)
-
-    # Fail if unknown arguments are present, once we excluded a deprecated
-    # command
-    if unknown_args:
-        tty.die('unrecognized arguments: {0}'.format(' '.join(unknown_args)))
-
+def module(parser, args):
     _subcommands[args.module_command](parser, args)
@@ -7,7 +7,6 @@

 import spack.cmd
 import spack.cmd.common.arguments as arguments
-import spack.environment as ev

 description = 'remove specs from an environment'
 section = "environments"
@@ -28,7 +27,7 @@ def setup_parser(subparser):


 def remove(parser, args):
-    env = ev.get_env(args, 'remove', required=True)
+    env = spack.cmd.require_active_env(cmd_name='remove')

     with env.write_transaction():
         if args.all:
@@ -34,7 +34,7 @@ def stage(parser, args):
         spack.stage.create_stage_root(custom_path)

     if not args.specs:
-        env = ev.get_env(args, 'stage')
+        env = ev.active_environment()
         if env:
             tty.msg("Staging specs from environment %s" % env.name)
             for spec in env.specs_by_hash.values():
@@ -14,6 +14,7 @@
 import llnl.util.tty.color as color
 from llnl.util.filesystem import working_dir

+import spack.bootstrap
 import spack.paths
 from spack.util.executable import which

@@ -38,15 +39,20 @@ def grouper(iterable, n, fillvalue=None):
         yield filter(None, group)


-#: directory where spack style started, for relativizing paths
-initial_working_dir = None
-
-#: List of directories to exclude from checks -- relative to spack root
-exclude_directories = [
-    os.path.relpath(spack.paths.external_path, spack.paths.prefix),
-]
+#: List of directories to exclude from checks.
+exclude_directories = [spack.paths.external_path]

-#: order in which tools should be run. flake8 is last so that it can
+#: Order in which tools should be run. flake8 is last so that it can
 #: double-check the results of other tools (if, e.g., --fix was provided)
-tool_order = ["isort", "mypy", "black", "flake8"]
+#: The list maps an executable name to a spack spec needed to install it.
+tool_order = [
+    ("isort", "py-isort@4.3.5:"),
+    ("mypy", "py-mypy@0.900:"),
+    ("black", "py-black"),
+    ("flake8", "py-flake8"),
+]

 #: tools we run in spack style
 tools = {}
@@ -59,7 +65,7 @@ def is_package(f):
     packages, since we allow `from spack import *` and poking globals
     into packages.
     """
-    return f.startswith("var/spack/repos/") or "docs/tutorial/examples" in f
+    return f.startswith("var/spack/repos/")


 #: decorator for adding tools to the list
@@ -73,14 +79,29 @@ def __call__(self, fun):
         return fun


-def changed_files(base=None, untracked=True, all_files=False):
-    """Get list of changed files in the Spack repository."""
+def changed_files(base="develop", untracked=True, all_files=False, root=None):
+    """Get list of changed files in the Spack repository.
+
+    Arguments:
+        base (str): name of base branch to evaluate differences with.
+        untracked (bool): include untracked files in the list.
+        all_files (bool): list all files in the repository.
+        root (str): use this directory instead of the Spack prefix.
+    """
+    if root is None:
+        root = spack.paths.prefix
+
     git = which("git", required=True)

-    # GITHUB_BASE_REF is set to the base branch for pull request actions
-    if base is None:
-        base = os.environ.get("GITHUB_BASE_REF", "develop")
+    # ensure base is in the repo
+    git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
+        fail_on_error=False)
+    if git.returncode != 0:
+        tty.die(
+            "This repository does not have a '%s' branch." % base,
+            "spack style needs this branch to determine which files changed.",
+            "Ensure that '%s' exists, or specify files to check explicitly." % base
+        )

     range = "{0}...".format(base)
@@ -101,7 +122,10 @@ def changed_files(base=None, untracked=True, all_files=False):
     if all_files:
         git_args.append(["ls-files", "--exclude-standard"])

-    excludes = [os.path.realpath(f) for f in exclude_directories]
+    excludes = [
+        os.path.realpath(os.path.join(root, f))
+        for f in exclude_directories
+    ]
     changed = set()

     for arg_list in git_args:
@@ -126,8 +150,8 @@ def setup_parser(subparser):
         "-b",
         "--base",
         action="store",
-        default=None,
-        help="select base branch for collecting list of modified files",
+        default="develop",
+        help="branch to compare against to determine changed files (default: develop)",
     )
     subparser.add_argument(
         "-a",
@@ -181,14 +205,20 @@ def setup_parser(subparser):
         action="store_true",
         help="run black if available (default: skip black)",
     )
+    subparser.add_argument(
+        "--root",
+        action="store",
+        default=None,
+        help="style check a different spack instance",
+    )
     subparser.add_argument(
         "files", nargs=argparse.REMAINDER, help="specific files to check"
     )


-def cwd_relative(path):
+def cwd_relative(path, args):
     """Translate prefix-relative path to current working directory-relative."""
-    return os.path.relpath(os.path.join(spack.paths.prefix, path), initial_working_dir)
+    return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)


 def rewrite_and_print_output(
@@ -198,7 +228,7 @@ def rewrite_and_print_output(
     # print results relative to current working directory
     def translate(match):
         return replacement.format(
-            cwd_relative(match.group(1)), *list(match.groups()[1:])
+            cwd_relative(match.group(1), args), *list(match.groups()[1:])
         )

     for line in output.split("\n"):
@@ -210,15 +240,15 @@ def translate(match):


 def print_style_header(file_list, args):
-    tools = [tool for tool in tool_order if getattr(args, tool)]
-    tty.msg("Running style checks on spack:", "selected: " + ", ".join(tools))
+    tools = [tool for tool, _ in tool_order if getattr(args, tool)]
+    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))

     # translate modified paths to cwd_relative if needed
     paths = [filename.strip() for filename in file_list]
     if not args.root_relative:
-        paths = [cwd_relative(filename) for filename in paths]
+        paths = [cwd_relative(filename, args) for filename in paths]

-    tty.msg("Modified files:", *paths)
+    tty.msg("Modified files", *paths)
     sys.stdout.flush()

@@ -242,12 +272,9 @@ def run_flake8(flake8_cmd, file_list, args):
     # run in chunks of 100 at a time to avoid line length limit
     # filename parameter in config *does not work* for this reliably
     for chunk in grouper(file_list, 100):
-
         output = flake8_cmd(
-            # use .flake8 implicitly to work around bug in flake8 upstream
-            # append-config is ignored if `--config` is explicitly listed
-            # see: https://gitlab.com/pycqa/flake8/-/issues/455
-            # "--config=.flake8",
+            # always run with config from running spack prefix
+            "--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
             *chunk,
             fail_on_error=False,
             output=str
@@ -262,12 +289,18 @@ def run_mypy(mypy_cmd, file_list, args):

 @tool("mypy")
 def run_mypy(mypy_cmd, file_list, args):
-    mpy_args = ["--package", "spack", "--package", "llnl", "--show-error-codes"]
+    # always run with config from running spack prefix
+    mypy_args = [
+        "--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
+        "--package", "spack",
+        "--package", "llnl",
+        "--show-error-codes",
+    ]
     # not yet, need other updates to enable this
     # if any([is_package(f) for f in file_list]):
-    #     mpy_args.extend(["--package", "packages"])
+    #     mypy_args.extend(["--package", "packages"])

-    output = mypy_cmd(*mpy_args, fail_on_error=False, output=str)
+    output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
     returncode = mypy_cmd.returncode

     rewrite_and_print_output(output, args)
@@ -278,13 +311,16 @@ def run_mypy(mypy_cmd, file_list, args):

 @tool("isort")
 def run_isort(isort_cmd, file_list, args):
-    check_fix_args = () if args.fix else ("--check", "--diff")
+    # always run with config from running spack prefix
+    isort_args = ("--settings-path", os.path.join(spack.paths.prefix, "pyproject.toml"))
+    if not args.fix:
+        isort_args += ("--check", "--diff")

     pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
     replacement = "ERROR: {0} Imports are incorrectly sorted"
     returncode = 0
     for chunk in grouper(file_list, 100):
-        packed_args = check_fix_args + tuple(chunk)
+        packed_args = isort_args + tuple(chunk)
         output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
         returncode |= isort_cmd.returncode

@@ -296,7 +332,12 @@ def run_isort(isort_cmd, file_list, args):

 @tool("black")
 def run_black(black_cmd, file_list, args):
-    check_fix_args = () if args.fix else ("--check", "--diff", "--color")
+    # always run with config from running spack prefix
+    black_args = ("--config", os.path.join(spack.paths.prefix, "pyproject.toml"))
+    if not args.fix:
+        black_args += ("--check", "--diff")
+        if color.get_color_when():  # only show color when spack would
+            black_args += ("--color",)

     pat = re.compile("would reformat +(.*)")
     replacement = "would reformat {0}"
@@ -305,50 +346,77 @@ def run_black(black_cmd, file_list, args):
     # run in chunks of 100 at a time to avoid line length limit
     # filename parameter in config *does not work* for this reliably
     for chunk in grouper(file_list, 100):
-        packed_args = check_fix_args + tuple(chunk)
+        packed_args = black_args + tuple(chunk)
         output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
         returncode |= black_cmd.returncode

         rewrite_and_print_output(output, args, pat, replacement)

     print_tool_result("black", returncode)

     return returncode


 def style(parser, args):
     # ensure python version is new enough
     if sys.version_info < (3, 6):
         tty.die("spack style requires Python 3.6 or later.")

     # save initial working directory for relativizing paths later
-    global initial_working_dir
-    initial_working_dir = os.getcwd()
+    args.initial_working_dir = os.getcwd()
+
+    # ensure that the config files we need actually exist in the spack prefix.
+    # assertions b/c users should not ever see these errors -- they're checked in CI.
+    assert os.path.isfile(os.path.join(spack.paths.prefix, "pyproject.toml"))
+    assert os.path.isfile(os.path.join(spack.paths.prefix, ".flake8"))
+
+    # validate spack root if the user provided one
+    args.root = os.path.realpath(args.root) if args.root else spack.paths.prefix
+    spack_script = os.path.join(args.root, "bin", "spack")
+    if not os.path.exists(spack_script):
+        tty.die(
+            "This does not look like a valid spack root.",
+            "No such file: '%s'" % spack_script
+        )

     file_list = args.files
     if file_list:

         def prefix_relative(path):
-            return os.path.relpath(
-                os.path.abspath(os.path.realpath(path)), spack.paths.prefix
-            )
+            return os.path.relpath(os.path.abspath(os.path.realpath(path)), args.root)

         file_list = [prefix_relative(p) for p in file_list]

     returncode = 0
-    with working_dir(spack.paths.prefix):
+    with working_dir(args.root):
         if not file_list:
             file_list = changed_files(args.base, args.untracked, args.all)
         print_style_header(file_list, args)

         # run tools in order defined in tool_order
         returncode = 0
-        for tool_name in tool_order:
+        for tool_name, tool_spec in tool_order:
             if getattr(args, tool_name):
                 run_function, required = tools[tool_name]
                 print_tool_header(tool_name)

-                cmd = which(tool_name, required=required)
-                if not cmd:
-                    color.cprint("  @y{%s not in PATH, skipped}" % tool_name)
-                    continue
-
-                returncode |= run_function(cmd, file_list, args)
+                try:
+                    # Bootstrap tools so we don't need to require install
+                    with spack.bootstrap.ensure_bootstrap_configuration():
+                        spec = spack.spec.Spec(tool_spec)
+                        cmd = None
+                        cmd = spack.bootstrap.get_executable(
+                            tool_name, spec=spec, install=True
+                        )
+                        if not cmd:
+                            color.cprint("  @y{%s not in PATH, skipped}" % tool_name)
+                            continue
+
+                        returncode |= run_function(cmd, file_list, args)
+                except Exception as e:
+                    raise spack.error.SpackError(
+                        "Couldn't bootstrap %s:" % tool_name, str(e)
+                    )

     if returncode == 0:
         tty.msg(color.colorize("@*{spack style checks were clean}"))
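The style() hunk above replaces the plain which() lookup with spack's bootstrapping machinery. A hedged sketch of that lookup as a single helper, relying only on the calls visible in the hunk (Spec, ensure_bootstrap_configuration, get_executable) and with the error handling simplified; the helper name is not part of the diff:

import spack.bootstrap
import spack.error
import spack.spec


def bootstrap_tool(tool_name, tool_spec):
    # install the tool on demand and return an executable for it
    try:
        with spack.bootstrap.ensure_bootstrap_configuration():
            spec = spack.spec.Spec(tool_spec)
            return spack.bootstrap.get_executable(tool_name, spec=spec, install=True)
    except Exception as e:
        raise spack.error.SpackError("Couldn't bootstrap %s:" % tool_name, str(e))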
@@ -155,7 +155,7 @@ def test_run(args):
         spack.config.set('config:fail_fast', True, scope='command_line')

     # Get specs to test
-    env = ev.get_env(args, 'test')
+    env = ev.active_environment()
     hashes = env.all_hashes() if env else None

     specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]

@@ -221,7 +221,7 @@ def test_list(args):

     # TODO: This can be extended to have all of the output formatting options
     # from `spack find`.
-    env = ev.get_env(args, 'test')
+    env = ev.active_environment()
     hashes = env.all_hashes() if env else None

     specs = spack.store.db.query(hashes=hashes)
@@ -7,7 +7,6 @@

 import spack.cmd
 import spack.cmd.common.arguments as arguments
-import spack.environment as ev

 description = 'remove specs from an environment'
 section = "environments"
@@ -22,7 +21,7 @@ def setup_parser(subparser):


 def undevelop(parser, args):
-    env = ev.get_env(args, 'undevelop', required=True)
+    env = spack.cmd.require_active_env(cmd_name='undevelop')

     if args.all:
         specs = env.dev_specs.keys()
@@ -69,12 +69,13 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
        concretized specs given from cli

     Args:
-        env (Environment): active environment, or ``None`` if there is not one
+        env (spack.environment.Environment): active environment, or ``None``
+            if there is not one
         specs (list): list of specs to be matched against installed packages
         allow_multiple_matches (bool): if True multiple matches are admitted

     Return:
-        list of specs
+        list: list of specs
     """
     # constrain uninstall resolution to current environment if one is active
     hashes = env.all_hashes() if env else None

@@ -118,15 +119,13 @@ def installed_dependents(specs, env):

     Args:
         specs (list): list of Specs
-        env (Environment): the active environment, or None
+        env (spack.environment.Environment or None): the active environment, or None

     Returns:
-        (tuple of dicts): two mappings: one from specs to their dependent
-            environments in the active environment (or global scope if
-            there is no environment), and one from specs to their
-            dependents in *inactive* environments (empty if there is no
-            environment
+        tuple: two mappings: one from specs to their dependent environments in the
+            active environment (or global scope if there is no environment), and one from
+            specs to their dependents in *inactive* environments (empty if there is no
+            environment
     """
     active_dpts = {}
     inactive_dpts = {}

@@ -155,9 +154,9 @@ def dependent_environments(specs):

     Args:
         specs (list): list of Specs
-    Returns:
-        (dict): mapping from spec to lists of dependent Environments
+
+    Returns:
+        dict: mapping from spec to lists of dependent Environments
     """
     dependents = {}
     for env in ev.all_environments():

@@ -176,9 +175,10 @@ def inactive_dependent_environments(spec_envs):
     have no dependent environments. Return the result.

     Args:
-        (dict): mapping from spec to lists of dependent Environments
+        spec_envs (dict): mapping from spec to lists of dependent Environments

     Returns:
-        (dict): mapping from spec to lists of *inactive* dependent Environments
+        dict: mapping from spec to lists of *inactive* dependent Environments
     """
     spec_inactive_envs = {}
     for spec, de_list in spec_envs.items():

@@ -203,7 +203,8 @@ def do_uninstall(env, specs, force):
     """Uninstalls all the specs in a list.

     Args:
-        env (Environment): active environment, or ``None`` if there is not one
+        env (spack.environment.Environment or None): active environment, or ``None``
+            if there is not one
         specs (list): list of specs to be uninstalled
         force (bool): force uninstallation (boolean)
     """

@@ -310,7 +311,7 @@ def get_uninstall_list(args, specs, env):


 def uninstall_specs(args, specs):
-    env = ev.get_env(args, 'uninstall')
+    env = ev.active_environment()

     uninstall_list, remove_list = get_uninstall_list(args, specs, env)
     anything_to_do = set(uninstall_list).union(set(remove_list))
Some files were not shown because too many files have changed in this diff.