Compare commits
735 Commits
develop-20
...
features/s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
655ee3387b | ||
|
|
a1d37bbd7a | ||
|
|
af4cb0a14b | ||
|
|
930dfd7cc4 | ||
|
|
9b53bb09f5 | ||
|
|
95b9be2c14 | ||
|
|
43c8bb9fa3 | ||
|
|
e59901d1f4 | ||
|
|
3d84549fbe | ||
|
|
a69c5b3e32 | ||
|
|
e3cce2bd96 | ||
|
|
0d668e4e92 | ||
|
|
ad6c7380c5 | ||
|
|
c064a30765 | ||
|
|
4a4f156d99 | ||
|
|
cb8878aaf4 | ||
|
|
d49f3a0960 | ||
|
|
15413c7258 | ||
|
|
de754c7a47 | ||
|
|
ac9398ed21 | ||
|
|
57769fac7d | ||
|
|
c65fd7e12d | ||
|
|
c71d778875 | ||
|
|
a7313dc407 | ||
|
|
31477d5dc7 | ||
|
|
382ba0d041 | ||
|
|
886c950423 | ||
|
|
3798b16a29 | ||
|
|
796617054d | ||
|
|
78fc25ec12 | ||
|
|
6de51fdc58 | ||
|
|
430ba496d1 | ||
|
|
e1ede9c04b | ||
|
|
856dd3417b | ||
|
|
e49c6f68bc | ||
|
|
eed7a1af24 | ||
|
|
22e40541c7 | ||
|
|
8561c89c25 | ||
|
|
6501705de2 | ||
|
|
0b3e1fd412 | ||
|
|
c260da5127 | ||
|
|
f63261dc65 | ||
|
|
1c081611ea | ||
|
|
428b4e340a | ||
|
|
20bf239a6a | ||
|
|
cd682613cf | ||
|
|
c1852e3706 | ||
|
|
855a8476e4 | ||
|
|
d4a892f200 | ||
|
|
66e2836ba1 | ||
|
|
52ab0c66fe | ||
|
|
f316068b27 | ||
|
|
553cc3b70a | ||
|
|
f0f9a16e4f | ||
|
|
9ec8eaa0d3 | ||
|
|
00182b19dc | ||
|
|
cc7a29c55a | ||
|
|
61b0f4f84d | ||
|
|
fe3bfa482e | ||
|
|
e5f53a6250 | ||
|
|
a7e8080784 | ||
|
|
f5e934f2dc | ||
|
|
54b57c5d1e | ||
|
|
725ef8f5c8 | ||
|
|
f51a9a9107 | ||
|
|
4f0e336ed0 | ||
|
|
64774f3015 | ||
|
|
4e9fbca033 | ||
|
|
a2fd26bbcc | ||
|
|
067da09b46 | ||
|
|
b1b0c108bb | ||
|
|
c624088a7b | ||
|
|
a965c7c5c8 | ||
|
|
904d43f0e6 | ||
|
|
10b6d7282a | ||
|
|
7112a49d1e | ||
|
|
b11bd6b745 | ||
|
|
4d0b04cf34 | ||
|
|
165c171659 | ||
|
|
aa3c62d936 | ||
|
|
cba2fe914c | ||
|
|
1b82779087 | ||
|
|
55b1b0f3f0 | ||
|
|
4606c8ed68 | ||
|
|
dd53eeb322 | ||
|
|
f42486b684 | ||
|
|
44ecea3813 | ||
|
|
f1114858f5 | ||
|
|
2b6bdc7013 | ||
|
|
586a35be43 | ||
|
|
7a8dc36760 | ||
|
|
e01151a200 | ||
|
|
29b50527a6 | ||
|
|
94961ffe0a | ||
|
|
03a7da1e44 | ||
|
|
97ffe2e575 | ||
|
|
7b10aae356 | ||
|
|
b61cd74707 | ||
|
|
374d94edf7 | ||
|
|
827522d825 | ||
|
|
8ba6e7eed2 | ||
|
|
e40c10509d | ||
|
|
21a2c3a591 | ||
|
|
70eb7506df | ||
|
|
2b95eecb83 | ||
|
|
df8507f470 | ||
|
|
645c8eeaeb | ||
|
|
b693987f95 | ||
|
|
7999686856 | ||
|
|
7001a2a65a | ||
|
|
7c985d6432 | ||
|
|
a66586d749 | ||
|
|
6b73f00310 | ||
|
|
063b987ceb | ||
|
|
fe19394bf9 | ||
|
|
e09955d83b | ||
|
|
d367f14d5e | ||
|
|
6f61e382da | ||
|
|
63e680e4f9 | ||
|
|
27557a133b | ||
|
|
78810e95ed | ||
|
|
553b44473f | ||
|
|
966a775a45 | ||
|
|
327c75386a | ||
|
|
a2cb7ee803 | ||
|
|
2a5d4b2291 | ||
|
|
3b59817ea7 | ||
|
|
06eacdf9d8 | ||
|
|
bfdcdb4851 | ||
|
|
83873d06a1 | ||
|
|
91333919c6 | ||
|
|
cd6237cac4 | ||
|
|
91412fb595 | ||
|
|
678c995415 | ||
|
|
63af548271 | ||
|
|
200dfb0346 | ||
|
|
e2f605f6e9 | ||
|
|
3cf1914b7e | ||
|
|
cd7a49114c | ||
|
|
1144487ee7 | ||
|
|
742b78d2b5 | ||
|
|
633d1d2ccb | ||
|
|
9adefd587e | ||
|
|
102a30a5a2 | ||
|
|
7ddc886d6d | ||
|
|
9e7183fb14 | ||
|
|
18ab3c20ce | ||
|
|
b91b42dc7b | ||
|
|
7900d0b3db | ||
|
|
847d7bc87d | ||
|
|
078984dcf4 | ||
|
|
010324714f | ||
|
|
7ce5ac1e6e | ||
|
|
565165f02d | ||
|
|
e4869cd558 | ||
|
|
990e0dc526 | ||
|
|
f9d8b6b5aa | ||
|
|
2079b888c8 | ||
|
|
2dbc5213b0 | ||
|
|
7a83cdbcc7 | ||
|
|
da33c12ad4 | ||
|
|
c30979ed66 | ||
|
|
5d7d18d028 | ||
|
|
92e42bbed9 | ||
|
|
899ac78887 | ||
|
|
7bec524dd5 | ||
|
|
546e0925b0 | ||
|
|
95b533ddcd | ||
|
|
28fe85ae66 | ||
|
|
6b936884f5 | ||
|
|
7b879d092d | ||
|
|
007c1148c0 | ||
|
|
8b2fec61f2 | ||
|
|
1cebb7e1c3 | ||
|
|
6f8d8ba47e | ||
|
|
9464898449 | ||
|
|
0902910784 | ||
|
|
7050ace968 | ||
|
|
7efbad0d81 | ||
|
|
2298abd7f4 | ||
|
|
46efa7e151 | ||
|
|
60c589db28 | ||
|
|
ca9a7b2033 | ||
|
|
470a26bbcd | ||
|
|
b52e4fc650 | ||
|
|
a653579e56 | ||
|
|
7f89391b14 | ||
|
|
34c98101ad | ||
|
|
f1ea979e2b | ||
|
|
55cbdd435c | ||
|
|
1cce947be6 | ||
|
|
0a735c6ea6 | ||
|
|
5400b1e222 | ||
|
|
ef461befcc | ||
|
|
831b4a3e4a | ||
|
|
6007a77a33 | ||
|
|
a2794f04bc | ||
|
|
3ae3bfd997 | ||
|
|
5f3f968a1f | ||
|
|
652de07d8c | ||
|
|
c16191d9ea | ||
|
|
1b1663acea | ||
|
|
d2f269ed7b | ||
|
|
4584d85ca6 | ||
|
|
2106a2be26 | ||
|
|
228c82502d | ||
|
|
431f5627d9 | ||
|
|
fb315c37ba | ||
|
|
f9fa160a24 | ||
|
|
1ee29929a7 | ||
|
|
97e691cdbf | ||
|
|
51ba25fec3 | ||
|
|
81281646e9 | ||
|
|
85ec4cca92 | ||
|
|
f3c21b0177 | ||
|
|
51ac4686b4 | ||
|
|
82752ad0b7 | ||
|
|
b231e6e9e9 | ||
|
|
90f8c20133 | ||
|
|
9835b072e2 | ||
|
|
f438a33978 | ||
|
|
8ded2ddf5e | ||
|
|
e3904d4cbf | ||
|
|
e1bcbcf9f3 | ||
|
|
fa671a639a | ||
|
|
28171f1b9d | ||
|
|
8de03e2bf5 | ||
|
|
2fa314b6b6 | ||
|
|
7780059c64 | ||
|
|
7e69671570 | ||
|
|
5650d4d419 | ||
|
|
fa38dd9386 | ||
|
|
16a2a5047c | ||
|
|
899e458ee5 | ||
|
|
4a8d09dcc1 | ||
|
|
98e206193b | ||
|
|
6a6c295938 | ||
|
|
9a1002c098 | ||
|
|
6c903543e1 | ||
|
|
994d995b64 | ||
|
|
54d17ae044 | ||
|
|
9ea103f94e | ||
|
|
83efafa09f | ||
|
|
5f29bb9b22 | ||
|
|
441b64c3d9 | ||
|
|
cee6c59684 | ||
|
|
b1adfcf665 | ||
|
|
433abfcc80 | ||
|
|
02063302c5 | ||
|
|
b9125ae3e7 | ||
|
|
0a2b63b032 | ||
|
|
35d84a6456 | ||
|
|
0257b2db4b | ||
|
|
d3bf1e04fc | ||
|
|
530639e15f | ||
|
|
c8695f2ba6 | ||
|
|
f3bd820374 | ||
|
|
29b9fe1f0b | ||
|
|
1090895e72 | ||
|
|
e983f4a858 | ||
|
|
72e3f10d5b | ||
|
|
c5ae5ba4db | ||
|
|
a1090029f3 | ||
|
|
0135c808a0 | ||
|
|
678084fed8 | ||
|
|
705d58005d | ||
|
|
cee266046b | ||
|
|
5aa3d9c39c | ||
|
|
3ee6507dd6 | ||
|
|
425bba2f1a | ||
|
|
a2cbc46dbc | ||
|
|
8538b0c01d | ||
|
|
ff30da7385 | ||
|
|
7c5771ed11 | ||
|
|
81fb1a9b8f | ||
|
|
835bd2557e | ||
|
|
e5a8d7be49 | ||
|
|
9f795de60b | ||
|
|
7791a30bc2 | ||
|
|
2e85c83301 | ||
|
|
251190a0c4 | ||
|
|
90b85239d5 | ||
|
|
f276a8da75 | ||
|
|
93799ec641 | ||
|
|
dddc056a29 | ||
|
|
3e6d9cdc06 | ||
|
|
091786411b | ||
|
|
4af09dd506 | ||
|
|
2626bff96d | ||
|
|
9ef1d609e2 | ||
|
|
4c60deb992 | ||
|
|
53bc782278 | ||
|
|
4e087349a4 | ||
|
|
53815b725a | ||
|
|
e8c8e7b8a8 | ||
|
|
b781ce5b0f | ||
|
|
a3c3f4c3d1 | ||
|
|
445b6dfcf8 | ||
|
|
b2ef64369f | ||
|
|
a8d2ea68f5 | ||
|
|
c7a437573b | ||
|
|
5736d1e206 | ||
|
|
e110e3c3af | ||
|
|
e2d8b581db | ||
|
|
10a4de8e04 | ||
|
|
96ddbd5e17 | ||
|
|
65b530e7ec | ||
|
|
de98e3d6e5 | ||
|
|
ffcb4ee487 | ||
|
|
bfba3c9d5c | ||
|
|
37e56ea24d | ||
|
|
453e8c71ac | ||
|
|
e669fcafd0 | ||
|
|
2dbbcf3ca5 | ||
|
|
bce710eec1 | ||
|
|
64a69796e2 | ||
|
|
dd460a0eb0 | ||
|
|
475fe9977a | ||
|
|
b86e42a5aa | ||
|
|
9f04c45dea | ||
|
|
20e6b60fce | ||
|
|
5f86ee5d93 | ||
|
|
84ad509621 | ||
|
|
ea0da49acb | ||
|
|
e77fbfe8f8 | ||
|
|
d485650369 | ||
|
|
c1f22ca5cb | ||
|
|
c5d1c9ae61 | ||
|
|
d8184b37a3 | ||
|
|
bd952a552f | ||
|
|
aa171a6cc9 | ||
|
|
e4ee59741e | ||
|
|
b3b9f4d4b7 | ||
|
|
c1f2b36854 | ||
|
|
87494d2941 | ||
|
|
ad26dcfbfc | ||
|
|
5541a184d5 | ||
|
|
f1140055d0 | ||
|
|
88782fb05a | ||
|
|
69d216a88e | ||
|
|
04f0af0a28 | ||
|
|
c71c7735fd | ||
|
|
89bc483c87 | ||
|
|
62d2e8d1f4 | ||
|
|
12abc233d0 | ||
|
|
c30c5df340 | ||
|
|
4a088f717e | ||
|
|
9a10538f6d | ||
|
|
c753446353 | ||
|
|
65a15c6145 | ||
|
|
e563f84ae2 | ||
|
|
622ad1ddd7 | ||
|
|
1bd17876ed | ||
|
|
a789689709 | ||
|
|
171a2e0e31 | ||
|
|
66d3fddedf | ||
|
|
14a3b13900 | ||
|
|
40d41455db | ||
|
|
d63ead25ac | ||
|
|
4a35dec206 | ||
|
|
c294b9d3b9 | ||
|
|
057b415074 | ||
|
|
3180b28d76 | ||
|
|
b47c31509d | ||
|
|
f99a5ef2e7 | ||
|
|
690bcf5d47 | ||
|
|
1e6bef079d | ||
|
|
564155fd1a | ||
|
|
f371b6f06c | ||
|
|
0f9434fca4 | ||
|
|
235831a035 | ||
|
|
4240748cea | ||
|
|
934e34fbd6 | ||
|
|
ea42d18506 | ||
|
|
2b763ff2db | ||
|
|
c6cc97953b | ||
|
|
ff144df549 | ||
|
|
f3acf201c4 | ||
|
|
e5364ea832 | ||
|
|
53b8f91c02 | ||
|
|
a841ddd00c | ||
|
|
39455768b2 | ||
|
|
e529a454eb | ||
|
|
1b5dc396e3 | ||
|
|
15a3ac0512 | ||
|
|
52f149266f | ||
|
|
8d33c2e7c0 | ||
|
|
b3d82dc3a8 | ||
|
|
0fb44529bb | ||
|
|
6ea944bf17 | ||
|
|
8c6177c47f | ||
|
|
b65d9f1524 | ||
|
|
03e5dddf24 | ||
|
|
7bb892f7b3 | ||
|
|
66ed8ebbd9 | ||
|
|
0d326f83b6 | ||
|
|
fc0955b125 | ||
|
|
13ba1b96c3 | ||
|
|
d66d169027 | ||
|
|
6decd6aaa1 | ||
|
|
3c0ffa8652 | ||
|
|
4917e3f664 | ||
|
|
b2a14b456e | ||
|
|
ab1580a37f | ||
|
|
c1f979cd54 | ||
|
|
d124338ecb | ||
|
|
d001f14514 | ||
|
|
c43205d6de | ||
|
|
54f1af5a29 | ||
|
|
350661f027 | ||
|
|
3699df2651 | ||
|
|
63197fea3e | ||
|
|
f044194b06 | ||
|
|
29302c13e9 | ||
|
|
c4808de2ff | ||
|
|
c390a4530e | ||
|
|
be771d5d6f | ||
|
|
8b45fa089e | ||
|
|
0d04223ccd | ||
|
|
5ef222d62f | ||
|
|
6810e9ed2e | ||
|
|
a6c638d0fa | ||
|
|
fa8a512945 | ||
|
|
24b73da9e6 | ||
|
|
4447d3339c | ||
|
|
d82c9e7f2a | ||
|
|
6828a7402a | ||
|
|
a0d62a40dd | ||
|
|
712dcf6b8d | ||
|
|
ad1fc34199 | ||
|
|
ab723b25d0 | ||
|
|
016673f419 | ||
|
|
7bc6d62e9b | ||
|
|
fca9cc3e0e | ||
|
|
2a178bfbb0 | ||
|
|
3381879358 | ||
|
|
ed9058618a | ||
|
|
a4c99bad6a | ||
|
|
f31f58ff26 | ||
|
|
f84918da4b | ||
|
|
80a237e250 | ||
|
|
f52d3b26c3 | ||
|
|
2029d714a0 | ||
|
|
31ef1df74f | ||
|
|
00ae96a7cb | ||
|
|
8d2a6d6744 | ||
|
|
9443e31b1e | ||
|
|
2d8ca8af69 | ||
|
|
de4d4695c4 | ||
|
|
c8cf85223f | ||
|
|
b869538544 | ||
|
|
4710cbb281 | ||
|
|
9ae1014e55 | ||
|
|
813c0dd031 | ||
|
|
91071933d0 | ||
|
|
df5bac3e6c | ||
|
|
7b9f8abce5 | ||
|
|
a2f9d4b6a1 | ||
|
|
77e16d55c1 | ||
|
|
ecb2442566 | ||
|
|
89c0b4accf | ||
|
|
8e5b51395a | ||
|
|
c2ada0f15a | ||
|
|
6d3541c5fd | ||
|
|
31e4149067 | ||
|
|
c9fba9ec79 | ||
|
|
282627714e | ||
|
|
714dd783f9 | ||
|
|
40b390903d | ||
|
|
ce1b569b69 | ||
|
|
b539eb5aab | ||
|
|
e992e1efbd | ||
|
|
33a52dd836 | ||
|
|
b5f06fb3bc | ||
|
|
494817b616 | ||
|
|
02470a5aae | ||
|
|
42232a8ab6 | ||
|
|
cb64df45c8 | ||
|
|
a11da7bdb9 | ||
|
|
9a22ae11c6 | ||
|
|
318a7e0e30 | ||
|
|
e976f351f8 | ||
|
|
437341d40e | ||
|
|
9d7ea1a28b | ||
|
|
d85668f096 | ||
|
|
5c3a23a481 | ||
|
|
8be1f26ac6 | ||
|
|
35bd21fc64 | ||
|
|
652170fb54 | ||
|
|
d4e6c29f25 | ||
|
|
c12772e73f | ||
|
|
a26ac1dbcc | ||
|
|
2afaeba292 | ||
|
|
a14e76b98d | ||
|
|
9a3a759ed3 | ||
|
|
72f17d6961 | ||
|
|
1b967a9d98 | ||
|
|
bb954390ec | ||
|
|
bf9b6940c9 | ||
|
|
22980b9e65 | ||
|
|
483426f771 | ||
|
|
1e5b976eb7 | ||
|
|
2aa6939b96 | ||
|
|
f7e601d352 | ||
|
|
c4082931e3 | ||
|
|
cee3e5436b | ||
|
|
613fa56bfc | ||
|
|
0752d94bbf | ||
|
|
3bf1a03760 | ||
|
|
e2844e2fef | ||
|
|
2ca733bbc1 | ||
|
|
e2b6eca420 | ||
|
|
67cb19614e | ||
|
|
e464461c19 | ||
|
|
6efe88f7a1 | ||
|
|
0ce35dafe1 | ||
|
|
49e419b2df | ||
|
|
d9033d8dac | ||
|
|
517b7fb0c9 | ||
|
|
568e79a1e3 | ||
|
|
9c8846b37b | ||
|
|
737b70cbbf | ||
|
|
03d2212881 | ||
|
|
b8d10916af | ||
|
|
4fe5f35c2f | ||
|
|
cea1d8b935 | ||
|
|
e7946a3a41 | ||
|
|
5c53973220 | ||
|
|
278a38f4af | ||
|
|
39bbedf517 | ||
|
|
2153f6056d | ||
|
|
2be9b41362 | ||
|
|
f9fa024fc5 | ||
|
|
7c7ac27900 | ||
|
|
253e8b1f2a | ||
|
|
60e75c9234 | ||
|
|
fb89337b04 | ||
|
|
53f71fc4a7 | ||
|
|
12e7c1569c | ||
|
|
2eb566b884 | ||
|
|
a7444873b9 | ||
|
|
397ff11d6d | ||
|
|
285563ad01 | ||
|
|
c3111ac0b4 | ||
|
|
6505e7e02a | ||
|
|
c82889058c | ||
|
|
2f4c20567c | ||
|
|
a7d6a1188b | ||
|
|
4f5244920f | ||
|
|
feecb60b9e | ||
|
|
4f18cab8d2 | ||
|
|
e6f1b4e63a | ||
|
|
c458e985af | ||
|
|
5f234e16d0 | ||
|
|
9001e9328a | ||
|
|
b237ee3689 | ||
|
|
aa911eca40 | ||
|
|
6362c615f5 | ||
|
|
544be90469 | ||
|
|
56a1663cd9 | ||
|
|
f9a46d61fa | ||
|
|
a81451ba1f | ||
|
|
b11e370888 | ||
|
|
54ee7d4165 | ||
|
|
15efcbe042 | ||
|
|
7c5fbee327 | ||
|
|
b19c4cdcf6 | ||
|
|
3212cf86f4 | ||
|
|
fbceae7773 | ||
|
|
b921d1a920 | ||
|
|
8128b549a5 | ||
|
|
7405d95035 | ||
|
|
a04b12a3ef | ||
|
|
cbf8f2326d | ||
|
|
297874bfed | ||
|
|
74398d74ac | ||
|
|
cef9c36183 | ||
|
|
5e7430975a | ||
|
|
1456f6dba1 | ||
|
|
daf74a60ca | ||
|
|
87df95c097 | ||
|
|
9b49576875 | ||
|
|
065cbf79fc | ||
|
|
09b89e87a4 | ||
|
|
ddab6156a6 | ||
|
|
10cdfff0d1 | ||
|
|
3328416976 | ||
|
|
094a621f3c | ||
|
|
87ce5d8ccb | ||
|
|
bcdc92e25f | ||
|
|
a323fab135 | ||
|
|
d42031b075 | ||
|
|
efbb18aa25 | ||
|
|
8a430f89b3 | ||
|
|
aeaa922eef | ||
|
|
a6d5a34be3 | ||
|
|
ba79542f3c | ||
|
|
dc10c8a1ed | ||
|
|
5ab814505e | ||
|
|
1d8bdcfc04 | ||
|
|
95cf341b50 | ||
|
|
a134485b1b | ||
|
|
d39edeb9a1 | ||
|
|
453fb27be2 | ||
|
|
831f04fb7d | ||
|
|
04044a9744 | ||
|
|
8077285a63 | ||
|
|
537926c1a7 | ||
|
|
02ff3d7b1e | ||
|
|
491cb278f3 | ||
|
|
ed1ebefd8f | ||
|
|
36d64fcbd4 | ||
|
|
c5cdc2c0a2 | ||
|
|
0eca86f64f | ||
|
|
50027d76a5 | ||
|
|
b4748de5a9 | ||
|
|
8f2532c624 | ||
|
|
0726513334 | ||
|
|
f951f38883 | ||
|
|
5262412c13 | ||
|
|
f022b93249 | ||
|
|
60b5e98182 | ||
|
|
682acae9fd | ||
|
|
c0f80e9117 | ||
|
|
dcf13af459 | ||
|
|
a2e4fb6b95 | ||
|
|
9dd92f493a | ||
|
|
b23e832002 | ||
|
|
ae2f626168 | ||
|
|
a73930da81 | ||
|
|
921d446196 | ||
|
|
8e2ea5de9d | ||
|
|
fd0baca222 | ||
|
|
d2fc0d6a35 | ||
|
|
eedc9e0eaf | ||
|
|
6b85f6b405 | ||
|
|
5686a6b928 | ||
|
|
ad665c6af1 | ||
|
|
d78d6db61e | ||
|
|
f47c307bf4 | ||
|
|
5b4edb9499 | ||
|
|
a6e6093922 | ||
|
|
2e8b4e660e | ||
|
|
0ca1ee8b91 | ||
|
|
a322672259 | ||
|
|
6cab86d0c1 | ||
|
|
86e7e2e070 | ||
|
|
69fca439f4 | ||
|
|
3b90fb589f | ||
|
|
fff126204c | ||
|
|
98e626cf67 | ||
|
|
d8fe628a95 | ||
|
|
4c378840e3 | ||
|
|
18de6a480b | ||
|
|
c6da4d586b | ||
|
|
61d6fc70e8 | ||
|
|
7c65655c7e | ||
|
|
76ca264b72 | ||
|
|
aa58d3c170 | ||
|
|
a32b898a00 | ||
|
|
6fcd43ee64 | ||
|
|
f1f9f00d43 | ||
|
|
8ff27f9257 | ||
|
|
78c62532c7 | ||
|
|
a7f327dced | ||
|
|
310c435396 | ||
|
|
fa3f27e8e7 | ||
|
|
6b0fefff29 | ||
|
|
f613316282 | ||
|
|
1b5b74390f | ||
|
|
b57f88cb89 | ||
|
|
03afc2a1e6 | ||
|
|
1f6ed9324d | ||
|
|
5559772afa | ||
|
|
8728631fe0 | ||
|
|
e34d9cbe5f | ||
|
|
0efba09990 | ||
|
|
a9cb80d792 | ||
|
|
dae6fe711c | ||
|
|
c9a24bc6c5 | ||
|
|
00663f29a9 | ||
|
|
15a48990b6 | ||
|
|
af0b898c2e | ||
|
|
ddf8384bc6 | ||
|
|
670f92f42b | ||
|
|
c81b0e3d2a | ||
|
|
f56d804d85 | ||
|
|
b57f08f22b | ||
|
|
34f3b8fdd0 | ||
|
|
0b3b49b4e0 | ||
|
|
fa96422702 | ||
|
|
e12168ed24 | ||
|
|
c0f2df8e0a | ||
|
|
8807ade98f | ||
|
|
13356ddbcc | ||
|
|
974033be80 | ||
|
|
8755fc7291 | ||
|
|
17c02fe759 | ||
|
|
4c7d18a772 | ||
|
|
b28b26c39a | ||
|
|
23aed605ec | ||
|
|
fdb8d565aa | ||
|
|
9b08296236 | ||
|
|
c82d8c63fa | ||
|
|
7a8989bbfc | ||
|
|
22c86074c8 | ||
|
|
ef9e449322 | ||
|
|
6b73195478 | ||
|
|
c7b9bf6a77 | ||
|
|
a84c91b259 | ||
|
|
e6566dfd67 | ||
|
|
d6419f32b8 | ||
|
|
60ed682577 | ||
|
|
6c1fa8c30b | ||
|
|
09167fe8ac | ||
|
|
eb7951818d | ||
|
|
6959656d51 | ||
|
|
f916b50491 | ||
|
|
7160e1d3e7 | ||
|
|
16369d50a7 | ||
|
|
35fc371222 | ||
|
|
4bbf4a5e79 | ||
|
|
e5bd79b011 | ||
|
|
2267b40bda | ||
|
|
b0b6016e12 | ||
|
|
c24265fe7e | ||
|
|
9d0102ac89 | ||
|
|
52b8b3ed8d | ||
|
|
380030c59a | ||
|
|
867a813328 | ||
|
|
ded3fa50a3 | ||
|
|
84653e8d9f | ||
|
|
ac0fd7138f | ||
|
|
c0f9f47b8c | ||
|
|
7e9d24a145 |
11
.github/dependabot.yml
vendored
11
.github/dependabot.yml
vendored
@@ -5,13 +5,10 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
# Requirements to build documentation
|
||||
# Requirements to run style checks and build documentation
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/lib/spack/docs"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
# Requirements to run style checks
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/.github/workflows/style"
|
||||
directories:
|
||||
- "/.github/workflows/requirements/style/*"
|
||||
- "/lib/spack/docs"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
|
||||
8
.github/workflows/audit.yaml
vendored
8
.github/workflows/audit.yaml
vendored
@@ -28,8 +28,8 @@ jobs:
|
||||
run:
|
||||
shell: ${{ matrix.system.shell }}
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
@@ -44,6 +44,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
coverage run $(which spack) audit packages
|
||||
coverage run $(which spack) audit configs
|
||||
coverage run $(which spack) -d audit externals
|
||||
coverage combine
|
||||
coverage xml
|
||||
@@ -52,6 +53,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
spack -d audit packages
|
||||
spack -d audit configs
|
||||
spack -d audit externals
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
@@ -59,6 +61,8 @@ jobs:
|
||||
. share/spack/setup-env.sh
|
||||
spack -d audit packages
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
spack -d audit configs
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
spack -d audit externals
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
|
||||
|
||||
17
.github/workflows/bootstrap.yml
vendored
17
.github/workflows/bootstrap.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap clingo
|
||||
@@ -60,10 +60,10 @@ jobs:
|
||||
run: |
|
||||
brew install cmake bison tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
@@ -71,12 +71,14 @@ jobs:
|
||||
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
|
||||
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
|
||||
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
|
||||
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
|
||||
run: |
|
||||
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
${{ env.VALIDATE_LAST_EXIT }}
|
||||
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
|
||||
|
||||
gnupg-sources:
|
||||
@@ -94,7 +96,7 @@ jobs:
|
||||
if: ${{ matrix.runner == 'ubuntu-latest' }}
|
||||
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap GnuPG
|
||||
@@ -123,10 +125,10 @@ jobs:
|
||||
run: |
|
||||
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: |
|
||||
3.8
|
||||
@@ -152,7 +154,7 @@ jobs:
|
||||
not_found=0
|
||||
old_path="$PATH"
|
||||
export PATH="$ver_dir:$PATH"
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
|
||||
export PATH="$old_path"
|
||||
fi
|
||||
fi
|
||||
@@ -166,4 +168,3 @@ jobs:
|
||||
source share/spack/setup-env.sh
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
21
.github/workflows/build-containers.yml
vendored
21
.github/workflows/build-containers.yml
vendored
@@ -40,8 +40,7 @@ jobs:
|
||||
# 1: Platforms to build for
|
||||
# 2: Base image (e.g. ubuntu:22.04)
|
||||
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
|
||||
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
|
||||
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
|
||||
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
|
||||
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
|
||||
@@ -56,7 +55,7 @@ jobs:
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
|
||||
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
|
||||
id: docker_meta
|
||||
@@ -77,7 +76,7 @@ jobs:
|
||||
env:
|
||||
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
|
||||
run: |
|
||||
.github/workflows/generate_spack_yaml_containerize.sh
|
||||
.github/workflows/bin/generate_spack_yaml_containerize.sh
|
||||
. share/spack/setup-env.sh
|
||||
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
|
||||
@@ -88,19 +87,19 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
|
||||
with:
|
||||
name: dockerfiles_${{ matrix.dockerfile[0] }}
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
|
||||
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
|
||||
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -108,13 +107,13 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
|
||||
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
@@ -127,7 +126,7 @@ jobs:
|
||||
needs: deploy-images
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
|
||||
uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
|
||||
with:
|
||||
name: dockerfiles
|
||||
pattern: dockerfiles_*
|
||||
|
||||
9
.github/workflows/ci.yaml
vendored
9
.github/workflows/ci.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
@@ -53,6 +53,13 @@ jobs:
|
||||
- 'var/spack/repos/builtin/packages/clingo/**'
|
||||
- 'var/spack/repos/builtin/packages/python/**'
|
||||
- 'var/spack/repos/builtin/packages/re2c/**'
|
||||
- 'var/spack/repos/builtin/packages/gnupg/**'
|
||||
- 'var/spack/repos/builtin/packages/libassuan/**'
|
||||
- 'var/spack/repos/builtin/packages/libgcrypt/**'
|
||||
- 'var/spack/repos/builtin/packages/libgpg-error/**'
|
||||
- 'var/spack/repos/builtin/packages/libksba/**'
|
||||
- 'var/spack/repos/builtin/packages/npth/**'
|
||||
- 'var/spack/repos/builtin/packages/pinentry/**'
|
||||
- 'lib/spack/**'
|
||||
- 'share/spack/**'
|
||||
- '.github/workflows/bootstrap.yml'
|
||||
|
||||
8
.github/workflows/install_spack.sh
vendored
8
.github/workflows/install_spack.sh
vendored
@@ -1,8 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
. share/spack/setup-env.sh
|
||||
echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
|
||||
spack config add "packages:all:target:[x86_64]"
|
||||
spack compiler find
|
||||
spack compiler info apple-clang
|
||||
spack debug report
|
||||
spack solve zlib
|
||||
4
.github/workflows/nightly-win-builds.yml
vendored
4
.github/workflows/nightly-win-builds.yml
vendored
@@ -14,10 +14,10 @@ jobs:
|
||||
build-paraview-deps:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
black==24.4.2
|
||||
black==24.8.0
|
||||
clingo==5.7.1
|
||||
flake8==7.1.0
|
||||
flake8==7.1.1
|
||||
isort==5.13.2
|
||||
mypy==1.8.0
|
||||
types-six==1.16.21.20240513
|
||||
48
.github/workflows/unit_tests.yaml
vendored
48
.github/workflows/unit_tests.yaml
vendored
@@ -16,45 +16,34 @@ jobs:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
|
||||
concretizer: ['clingo']
|
||||
on_develop:
|
||||
- ${{ github.ref == 'refs/heads/develop' }}
|
||||
include:
|
||||
- python-version: '3.11'
|
||||
os: ubuntu-latest
|
||||
concretizer: original
|
||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||
- python-version: '3.6'
|
||||
os: ubuntu-20.04
|
||||
concretizer: clingo
|
||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||
exclude:
|
||||
- python-version: '3.7'
|
||||
os: ubuntu-latest
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.8'
|
||||
os: ubuntu-latest
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.9'
|
||||
os: ubuntu-latest
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.10'
|
||||
os: ubuntu-latest
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
- python-version: '3.11'
|
||||
os: ubuntu-latest
|
||||
concretizer: 'clingo'
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -72,7 +61,7 @@ jobs:
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
if: ${{ matrix.concretizer == 'clingo' }}
|
||||
env:
|
||||
@@ -85,7 +74,6 @@ jobs:
|
||||
- name: Run unit tests
|
||||
env:
|
||||
SPACK_PYTHON: python
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
SPACK_TEST_PARALLEL: 2
|
||||
COVERAGE: true
|
||||
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||
@@ -100,10 +88,10 @@ jobs:
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -118,7 +106,7 @@ jobs:
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
- name: Run shell tests
|
||||
env:
|
||||
COVERAGE: true
|
||||
@@ -141,13 +129,13 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Run unit tests
|
||||
@@ -160,10 +148,10 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -178,11 +166,10 @@ jobs:
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
- name: Run unit tests (full suite with coverage)
|
||||
env:
|
||||
COVERAGE: true
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
|
||||
@@ -198,10 +185,10 @@ jobs:
|
||||
os: [macos-13, macos-14]
|
||||
python-version: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -213,11 +200,10 @@ jobs:
|
||||
brew install dash fish gcc gnupg2 kcov
|
||||
- name: Run unit tests
|
||||
env:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
SPACK_TEST_PARALLEL: 4
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) bootstrap disable spack-install
|
||||
$(which spack) solve zlib
|
||||
@@ -236,10 +222,10 @@ jobs:
|
||||
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -247,7 +233,7 @@ jobs:
|
||||
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
|
||||
- name: Create local develop
|
||||
run: |
|
||||
./.github/workflows/setup_git.ps1
|
||||
./.github/workflows/bin/setup_git.ps1
|
||||
- name: Unit Test
|
||||
run: |
|
||||
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
|
||||
|
||||
20
.github/workflows/valid-style.yml
vendored
20
.github/workflows/valid-style.yml
vendored
@@ -18,15 +18,15 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools
|
||||
pip install -r .github/workflows/style/requirements.txt
|
||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||
- name: vermin (Repositories)
|
||||
@@ -35,22 +35,22 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools
|
||||
pip install -r .github/workflows/style/requirements.txt
|
||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
- name: Run style tests
|
||||
run: |
|
||||
share/spack/qa/run-style-tests
|
||||
@@ -70,13 +70,13 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap Spack development environment
|
||||
@@ -85,5 +85,5 @@ jobs:
|
||||
source share/spack/setup-env.sh
|
||||
spack debug report
|
||||
spack -d bootstrap now --dev
|
||||
spack style -t black
|
||||
spack -d style -t black
|
||||
spack unit-test -V
|
||||
|
||||
321
CHANGELOG.md
321
CHANGELOG.md
@@ -1,3 +1,324 @@
|
||||
|
||||
# v0.22.0 (2024-05-12)
|
||||
|
||||
`v0.22.0` is a major feature release.
|
||||
|
||||
## Features in this release
|
||||
|
||||
1. **Compiler dependencies**
|
||||
|
||||
We are in the process of making compilers proper dependencies in Spack, and a number
|
||||
of changes in `v0.22` support that effort. You may notice nodes in your dependency
|
||||
graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
|
||||
may notice that Spack graphs now include `libc`. We've also begun moving compiler
|
||||
configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
|
||||
other externals. We are trying to do this with the least disruption possible, so
|
||||
your existing `compilers.yaml` files should still work. We expect to be done with
|
||||
this transition by the `v0.23` release in November.
|
||||
|
||||
* #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
|
||||
new package `gcc-runtime`, which contains a copy of the shared compiler runtime
|
||||
libraries. This enables gcc runtime libraries to be installed and relocated when
|
||||
using a build cache. When building minimal Spack-generated container images it is
|
||||
no longer necessary to install libgfortran, libgomp etc. using the system package
|
||||
manager.
|
||||
|
||||
* #42062: Packages compiled with `%oneapi` now depend on a new package
|
||||
`intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
|
||||
provide virtuals and compilers can inject dependencies on virtuals into compiled
|
||||
packages. This allows us to model library soname compatibility and allows
|
||||
compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
|
||||
provided by standalone libraries). Note that until we have an agreement in place
|
||||
with intel, Intel packages are marked `redistribute(source=False, binary=False)`
|
||||
and must be downloaded outside of Spack.
|
||||
|
||||
* #43272: changes to the optimization criteria of the solver improve the hit-rate of
|
||||
buildcaches by a fair amount. The solver more relaxed compatibility rules and will
|
||||
not try to strictly match compilers or targets of reused specs. Users can still
|
||||
enforce the previous strict behavior with `require:` sections in `packages.yaml`.
|
||||
Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
|
||||
`%oneapi` specs that do not have the runtime libraries as a dependency.
|
||||
|
||||
* #43539: Spack will reuse specs built with compilers that are *not* explicitly
|
||||
configured in `compilers.yaml`. Because we can now keep runtime libraries in build
|
||||
cache, we do not require you to also have a local configured compiler to *use* the
|
||||
runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
|
||||
updates that happen underneath Spack.
|
||||
|
||||
* #43190: binary compatibility on `linux` is now based on the `libc` version,
|
||||
instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
|
||||
`musl`) and add it as an implicit external node in the dependency graph. Binaries
|
||||
with a `libc` with the same name and a version less than or equal to that of the
|
||||
detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.
|
||||
|
||||
* #43464: each package that can provide a compiler is now detectable using `spack
|
||||
external find`. External packages defining compiler paths are effectively used as
|
||||
compilers, and `spack external find -t compiler` can be used as a substitute for
|
||||
`spack compiler find`. More details on this transition are in
|
||||
[the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
|
||||
|
||||
2. **Improved `spack find` UI for Environments**
|
||||
|
||||
If you're working in an enviroment, you likely care about:
|
||||
|
||||
* What are the roots
|
||||
* Which ones are installed / not installed
|
||||
* What's been added that still needs to be concretized
|
||||
|
||||
We've tweaked `spack find` in environments to show this information much more
|
||||
clearly. Installation status is shown next to each root, so you can see what is
|
||||
installed. Roots are also shown in bold in the list of installed packages. There is
|
||||
also a new option for `spack find -r` / `--only-roots` that will only show env
|
||||
roots, if you don't want to look at all the installed specs.
|
||||
|
||||
More details in #42334.
|
||||
|
||||
3. **Improved command-line string quoting**
|
||||
|
||||
We are making some breaking changes to how Spack parses specs on the CLI in order to
|
||||
respect shell quoting instead of trying to fight it. If you (sadly) had to write
|
||||
something like this on the command line:
|
||||
|
||||
```
|
||||
spack install zlib cflags=\"-O2 -g\"
|
||||
```
|
||||
|
||||
That will now result in an error, but you can now write what you probably expected
|
||||
to work in the first place:
|
||||
|
||||
```
|
||||
spack install zlib cflags="-O2 -g"
|
||||
```
|
||||
|
||||
Quoted can also now include special characters, so you can supply flags like:
|
||||
|
||||
```
|
||||
spack intall zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
|
||||
```
|
||||
|
||||
To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
|
||||
and `==` when for flags or variants. This would not have broken before but will now
|
||||
result in an error:
|
||||
|
||||
```
|
||||
spack install zlib cflags = "-O2 -g"
|
||||
```
|
||||
|
||||
More details and discussion in #30634.
|
||||
|
||||
4. **Revert default `spack install` behavior to `--reuse`**
|
||||
|
||||
We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
|
||||
#30990 (in `v0.20`), which meant that *every* `spack install` invocation would
|
||||
attempt to build a new version of the requested package / any environment roots.
|
||||
While this is a common ask for *upgrading* and for *developer* workflows, we don't
|
||||
think it should be the default for a package manager.
|
||||
|
||||
We are going to try to stick to this policy:
|
||||
1. Prioritize reuse and build as little as possible by default.
|
||||
2. Only upgrade or install duplicates if they are explicitly asked for, or if there
|
||||
is a known security issue that necessitates an upgrade.
|
||||
|
||||
With the install command you now have three options:
|
||||
|
||||
* `--reuse` (default): reuse as many existing installations as possible.
|
||||
* `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
|
||||
* `--fresh`: install fresh versions of requested packages (roots) and their dependencies.
|
||||
|
||||
We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
|
||||
that it may give you fresh versions. More details in #41302 and #43988.
|
||||
|
||||
5. **More control over reused specs**
|
||||
|
||||
You can now control which packages to reuse and how. There is a new
|
||||
`concretizer:reuse` config option, which accepts the following properties:
|
||||
|
||||
- `roots`: `true` to reuse roots, `false` to reuse just dependencies
|
||||
- `exclude`: list of constraints used to select which specs *not* to reuse
|
||||
- `include`: list of constraints used to select which specs *to* reuse
|
||||
- `from`: list of sources for reused specs (some combination of `local`,
|
||||
`buildcache`, or `external`)
|
||||
|
||||
For example, to reuse only specs compiled with GCC, you could write:
|
||||
|
||||
```yaml
|
||||
concretizer:
|
||||
reuse:
|
||||
roots: true
|
||||
include:
|
||||
- "%gcc"
|
||||
```
|
||||
|
||||
Or, if `openmpi` must be used from externals, and it must be the only external used:
|
||||
|
||||
```yaml
|
||||
concretizer:
|
||||
reuse:
|
||||
roots: true
|
||||
from:
|
||||
- type: local
|
||||
exclude: ["openmpi"]
|
||||
- type: buildcache
|
||||
exclude: ["openmpi"]
|
||||
- type: external
|
||||
include: ["openmpi"]
|
||||
```
|
||||
|
||||
6. **New `redistribute()` directive**
|
||||
|
||||
Some packages can't be redistributed in source or binary form. We need an explicit
|
||||
way to say that in a package.
|
||||
|
||||
Now there is a `redistribute()` directive so that package authors can write:
|
||||
|
||||
```python
|
||||
class MyPackage(Package):
|
||||
redistribute(source=False, binary=False)
|
||||
```
|
||||
|
||||
Like other directives, this works with `when=`:
|
||||
|
||||
```python
|
||||
class MyPackage(Package):
|
||||
# 12.0 and higher are proprietary
|
||||
redistribute(source=False, binary=False, when="@12.0:")
|
||||
|
||||
# can't redistribute when we depend on some proprietary dependency
|
||||
redistribute(source=False, binary=False, when="^proprietary-dependency")
|
||||
```
|
||||
|
||||
More in #20185.
|
||||
|
||||
7. **New `conflict:` and `prefer:` syntax for package preferences**
|
||||
|
||||
Previously, you could express conflicts and preferences in `packages.yaml` through
|
||||
some contortions with `require:`:
|
||||
|
||||
```yaml
|
||||
packages:
|
||||
zlib-ng:
|
||||
require:
|
||||
- one_of: ["%clang", "@:"] # conflict on %clang
|
||||
- any_of: ["+shared", "@:"] # strong preference for +shared
|
||||
```
|
||||
|
||||
You can now use `require:` and `prefer:` for a much more readable configuration:
|
||||
|
||||
```yaml
|
||||
packages:
|
||||
zlib-ng:
|
||||
conflict:
|
||||
- "%clang"
|
||||
prefer:
|
||||
- "+shared"
|
||||
```
|
||||
|
||||
See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
|
||||
and #41832 for more details.
|
||||
|
||||
8. **`include_concrete` in environments**
|
||||
|
||||
You may want to build on the *concrete* contents of another environment without
|
||||
changing that environment. You can now include the concrete specs from another
|
||||
environment's `spack.lock` with `include_concrete`:
|
||||
|
||||
```yaml
|
||||
spack:
|
||||
specs: []
|
||||
concretizer:
|
||||
unify: true
|
||||
include_concrete:
|
||||
- /path/to/environment1
|
||||
- /path/to/environment2
|
||||
```
|
||||
|
||||
Now, when *this* environment is concretized, it will bring in the already concrete
|
||||
specs from `environment1` and `environment2`, and build on top of them without
|
||||
changing them. This is useful if you have phased deployments, where old deployments
|
||||
should not be modified but you want to use as many of them as possible. More details
|
||||
in #33768.
|
||||
|
||||
9. **`python-venv` isolation**
|
||||
|
||||
Spack has unique requirements for Python because it:
|
||||
1. installs every package in its own independent directory, and
|
||||
2. allows users to register *external* python installations.
|
||||
|
||||
External installations may contain their own installed packages that can interfere
|
||||
with Spack installations, and some distributions (Debian and Ubuntu) even change the
|
||||
`sysconfig` in ways that alter the installation layout of installed Python packages
|
||||
(e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
|
||||
from these and other issues, we now insert a small `python-venv` package in between
|
||||
`python` and packages that need to install Python code. This isolates Spack's build
|
||||
environment, isolates Spack from any issues with an external python, and resolves a
|
||||
large number of issues we've had with Python installations.
|
||||
|
||||
See #40773 for further details.
|
||||
|
||||
## New commands, options, and directives
|
||||
|
||||
* Allow packages to be pushed to build cache after install from source (#42423)
|
||||
* `spack develop`: stage build artifacts in same root as non-dev builds #41373
|
||||
* Don't delete `spack develop` build artifacts after install (#43424)
|
||||
* `spack find`: add options for local/upstream only (#42999)
|
||||
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
|
||||
* `patch`: support reversing patches (#43040)
|
||||
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
|
||||
* `spack list`: add `--namesapce` / `--repo` option (#41948)
|
||||
* directives: add `checked_by` field to `license()`, add some license checks
|
||||
* `spack gc`: add options for environments and build dependencies (#41731)
|
||||
* Add `--create` to `spack env activate` (#40896)
|
||||
|
||||
## Performance improvements
|
||||
|
||||
* environment.py: fix excessive re-reads (#43746)
|
||||
* ruamel yaml: fix quadratic complexity bug (#43745)
|
||||
* Refactor to improve `spec format` speed (#43712)
|
||||
* Do not acquire a write lock on the env post install if no views (#43505)
|
||||
* asp.py: fewer calls to `spec.copy()` (#43715)
|
||||
* spec.py: early return in `__str__`
|
||||
* avoid `jinja2` import at startup unless needed (#43237)
|
||||
|
||||
## Other new features of note
|
||||
|
||||
* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
|
||||
`neoverse-v2` detection.
|
||||
* `spack config get`/`blame`: with no args, show entire config
|
||||
* `spack env create <env>`: dir if dir-like (#44024)
|
||||
* ASP-based solver: update os compatibility for macOS (#43862)
|
||||
* Add handling of custom ssl certs in urllib ops (#42953)
|
||||
* Add ability to rename environments (#43296)
|
||||
* Add config option and compiler support to reuse across OS's (#42693)
|
||||
* Support for prereleases (#43140)
|
||||
* Only reuse externals when configured (#41707)
|
||||
* Environments: Add support for including views (#42250)
|
||||
|
||||
## Binary caches
|
||||
* Build cache: make signed/unsigned a mirror property (#41507)
|
||||
* tools stack
|
||||
|
||||
## Removals, deprecations, and syntax changes
|
||||
* remove `dpcpp` compiler and package (#43418)
|
||||
* spack load: remove --only argument (#42120)
|
||||
|
||||
## Notable Bugfixes
|
||||
* repo.py: drop deleted packages from provider cache (#43779)
|
||||
* Allow `+` in module file names (#41999)
|
||||
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
|
||||
* Show extension commands with spack -h (#41726)
|
||||
* Support environment variable expansion inside module projections (#42917)
|
||||
* Alert user to failed concretizations (#42655)
|
||||
* shell: fix zsh color formatting for PS1 in environments (#39497)
|
||||
* spack mirror create --all: include patches (#41579)
|
||||
|
||||
## Spack community stats
|
||||
|
||||
* 7,994 total packages; 525 since `v0.21.0`
|
||||
* 178 new Python packages, 5 new R packages
|
||||
* 358 people contributed to this release
|
||||
* 344 committers to packages
|
||||
* 45 committers to core
|
||||
|
||||
# v0.21.2 (2024-03-01)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
@@ -188,25 +188,27 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :end_switch
|
||||
|
||||
:case_load
|
||||
:: If args contain --sh, --csh, or -h/--help: just execute.
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
)
|
||||
if NOT defined _sp_args (
|
||||
exit /B 0
|
||||
)
|
||||
|
||||
:: If args contain --bat, or -h/--help: just execute.
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
|
||||
goto :default_case
|
||||
)
|
||||
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
|
||||
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
|
||||
) do %%I
|
||||
|
||||
goto :end_switch
|
||||
|
||||
:case_unload
|
||||
goto :case_load
|
||||
|
||||
:default_case
|
||||
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
|
||||
goto :end_switch
|
||||
|
||||
@@ -170,23 +170,6 @@ config:
|
||||
# If set to true, Spack will use ccache to cache C compiles.
|
||||
ccache: false
|
||||
|
||||
|
||||
# The concretization algorithm to use in Spack. Options are:
|
||||
#
|
||||
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
|
||||
# backtracking and optimization for user preferences. Spack will
|
||||
# try to bootstrap the logic solver, if not already available.
|
||||
#
|
||||
# 'original': Spack's original greedy, fixed-point concretizer. This
|
||||
# algorithm can make decisions too early and will not backtrack
|
||||
# sufficiently for many specs. This will soon be deprecated in
|
||||
# favor of clingo.
|
||||
#
|
||||
# See `concretizer.yaml` for more settings you can fine-tune when
|
||||
# using clingo.
|
||||
concretizer: clingo
|
||||
|
||||
|
||||
# How long to wait to lock the Spack installation database. This lock is used
|
||||
# when Spack needs to manage its own package metadata and all operations are
|
||||
# expected to complete within the default time limit. The timeout should
|
||||
|
||||
@@ -20,11 +20,14 @@ packages:
|
||||
awk: [gawk]
|
||||
armci: [armcimpi]
|
||||
blas: [openblas, amdblis]
|
||||
c: [gcc]
|
||||
cxx: [gcc]
|
||||
D: [ldc]
|
||||
daal: [intel-oneapi-daal]
|
||||
elf: [elfutils]
|
||||
fftw-api: [fftw, amdfftw]
|
||||
flame: [libflame, amdlibflame]
|
||||
fortran: [gcc]
|
||||
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
|
||||
fuse: [libfuse]
|
||||
gl: [glx, osmesa]
|
||||
@@ -61,6 +64,7 @@ packages:
|
||||
tbb: [intel-tbb]
|
||||
unwind: [libunwind]
|
||||
uuid: [util-linux-uuid, libuuid]
|
||||
wasi-sdk: [wasi-sdk-prebuilt]
|
||||
xxd: [xxd-standalone, vim]
|
||||
yacc: [bison, byacc]
|
||||
ziglang: [zig]
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
config:
|
||||
locks: false
|
||||
concretizer: clingo
|
||||
build_stage::
|
||||
- '$spack/.staging'
|
||||
stage_name: '{name}-{version}-{hash:7}'
|
||||
|
||||
@@ -206,6 +206,7 @@ def setup(sphinx):
|
||||
("py:class", "six.moves.urllib.parse.ParseResult"),
|
||||
("py:class", "TextIO"),
|
||||
("py:class", "hashlib._Hash"),
|
||||
("py:class", "concurrent.futures._base.Executor"),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
("py:class", "spack.provider_index._IndexBase"),
|
||||
("py:class", "spack.repo._PrependFileLoader"),
|
||||
|
||||
@@ -203,12 +203,9 @@ The OS that are currently supported are summarized in the table below:
|
||||
* - Ubuntu 24.04
|
||||
- ``ubuntu:24.04``
|
||||
- ``spack/ubuntu-noble``
|
||||
* - CentOS 7
|
||||
- ``centos:7``
|
||||
- ``spack/centos7``
|
||||
* - CentOS Stream
|
||||
- ``quay.io/centos/centos:stream``
|
||||
- ``spack/centos-stream``
|
||||
* - CentOS Stream9
|
||||
- ``quay.io/centos/centos:stream9``
|
||||
- ``spack/centos-stream9``
|
||||
* - openSUSE Leap
|
||||
- ``opensuse/leap``
|
||||
- ``spack/leap15``
|
||||
|
||||
@@ -931,32 +931,84 @@ This allows for a much-needed reduction in redundancy between packages
|
||||
and constraints.
|
||||
|
||||
|
||||
----------------
|
||||
Filesystem Views
|
||||
----------------
|
||||
-----------------
|
||||
Environment Views
|
||||
-----------------
|
||||
|
||||
Spack Environments can define filesystem views, which provide a direct access point
|
||||
for software similar to the directory hierarchy that might exist under ``/usr/local``.
|
||||
Filesystem views are updated every time the environment is written out to the lock
|
||||
file ``spack.lock``, so the concrete environment and the view are always compatible.
|
||||
The files of the view's installed packages are brought into the view by symbolic or
|
||||
hard links, referencing the original Spack installation, or by copy.
|
||||
Spack Environments can have an associated filesystem view, which is a directory
|
||||
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
|
||||
in which all files of the installed packages are linked.
|
||||
|
||||
By default a view is created for each environment, thanks to the ``view: true``
|
||||
option in the ``spack.yaml`` manifest file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs: [perl, python]
|
||||
view: true
|
||||
|
||||
The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
|
||||
If you've used ``spack env activate``, you may have already interacted with this view. Spack
|
||||
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
|
||||
you can directly run executables from all installed packages in the environment.
|
||||
|
||||
Views are highly customizable: you can control where they are put, modify their structure,
|
||||
include and exclude specs, change how files are linked, and you can even generate multiple
|
||||
views for a single environment.
|
||||
|
||||
.. _configuring_environment_views:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuration in ``spack.yaml``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Minimal view configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The Spack Environment manifest file has a top-level keyword
|
||||
``view``. Each entry under that heading is a **view descriptor**, headed
|
||||
by a name. Any number of views may be defined under the ``view`` heading.
|
||||
The view descriptor contains the root of the view, and
|
||||
optionally the projections for the view, ``select`` and
|
||||
``exclude`` lists for the view and link information via ``link`` and
|
||||
The minimal configuration
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view: true
|
||||
|
||||
lets Spack generate a single view with default settings under the
|
||||
``.spack-env/view`` directory of the environment.
|
||||
|
||||
Another short way to configure a view is to specify just where to put it:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view: /path/to/view
|
||||
|
||||
Views can also be disabled by setting ``view: false``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Advanced view configuration
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
One or more **view descriptors** can be defined under ``view``, keyed by a name.
|
||||
The example from the previous section with ``view: /path/to/view`` is equivalent
|
||||
to defining a view descriptor named ``default`` with a ``root`` attribute:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view:
|
||||
default: # name of the view
|
||||
root: /path/to/view # view descriptor attribute
|
||||
|
||||
The ``default`` view descriptor name is special: when you ``spack env activate`` your
|
||||
environment, this view will be used to update (among other things) your ``PATH``
|
||||
variable.
|
||||
|
||||
View descriptors must contain the root of the view, and optionally projections,
|
||||
``select`` and ``exclude`` lists and link information via ``link`` and
|
||||
``link_type``.
|
||||
|
||||
For example, in the following manifest
|
||||
As a more advanced example, in the following manifest
|
||||
file snippet we define a view named ``mpis``, rooted at
|
||||
``/path/to/view`` in which all projections use the package name,
|
||||
version, and compiler name to determine the path for a given
|
||||
@@ -1001,59 +1053,10 @@ of ``hardlink`` or ``copy``.
|
||||
when the environment is not activated, and linked libraries will be located
|
||||
*outside* of the view thanks to rpaths.
|
||||
|
||||
|
||||
There are two shorthands for environments with a single view. If the
|
||||
environment at ``/path/to/env`` has a single view, with a root at
|
||||
``/path/to/env/.spack-env/view``, with default selection and exclusion
|
||||
and the default projection, we can put ``view: True`` in the
|
||||
environment manifest. Similarly, if the environment has a view with a
|
||||
different root, but default selection, exclusion, and projections, the
|
||||
manifest can say ``view: /path/to/view``. These views are
|
||||
automatically named ``default``, so that
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view: True
|
||||
|
||||
is equivalent to
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view:
|
||||
default:
|
||||
root: .spack-env/view
|
||||
|
||||
and
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view: /path/to/view
|
||||
|
||||
is equivalent to
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
view:
|
||||
default:
|
||||
root: /path/to/view
|
||||
|
||||
By default, Spack environments are configured with ``view: True`` in
|
||||
the manifest. Environments can be configured without views using
|
||||
``view: False``. For backwards compatibility reasons, environments
|
||||
with no ``view`` key are treated the same as ``view: True``.
|
||||
|
||||
From the command line, the ``spack env create`` command takes an
|
||||
argument ``--with-view [PATH]`` that sets the path for a single, default
|
||||
view. If no path is specified, the default path is used (``view:
|
||||
True``). The argument ``--without-view`` can be used to create an
|
||||
true``). The argument ``--without-view`` can be used to create an
|
||||
environment without any view configured.
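For example, both invocations below are sketches (the environment name ``myenv`` and the view path are placeholders):

.. code-block:: console

   $ spack env create --with-view /path/to/view myenv
   $ spack env create --without-view myenv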
|
||||
|
||||
The ``spack env view`` command can be used to manage the views
|
||||
@@ -1119,11 +1122,18 @@ the projection under ``all`` before reaching those entries.
|
||||
Activating environment views
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``spack env activate`` command will put the default view for the
|
||||
environment into the user's path, in addition to activating the
|
||||
environment for Spack commands. The arguments ``-v,--with-view`` and
|
||||
``-V,--without-view`` can be used to tune this behavior. The default
|
||||
behavior is to activate with the environment view if there is one.
|
||||
The ``spack env activate <env>`` command has two effects:
|
||||
|
||||
1. It activates the environment so that further Spack commands such
|
||||
as ``spack install`` will run in the context of the environment.
|
||||
2. It activates the view so that environment variables such as
|
||||
``PATH`` are updated to include the view.
|
||||
|
||||
Without further arguments, the ``default`` view of the environment is
|
||||
activated. If a view with a different name has to be activated,
|
||||
``spack env activate --with-view <name> <env>`` can be
|
||||
used instead. You can also activate the environment without modifying
|
||||
further environment variables using ``--without-view``.
|
||||
|
||||
The environment variables affected by the ``spack env activate``
|
||||
command and the paths that are used to update them are determined by
|
||||
@@ -1146,8 +1156,8 @@ relevant variable if the path exists. For this reason, it is not
|
||||
recommended to use non-default projections with the default view of an
|
||||
environment.
|
||||
|
||||
The ``spack env deactivate`` command will remove the default view of
|
||||
the environment from the user's path.
|
||||
The ``spack env deactivate`` command will remove the active view of
|
||||
the Spack environment from the user's environment variables.
|
||||
|
||||
|
||||
.. _env-generate-depfile:
|
||||
@@ -1306,7 +1316,7 @@ index once every package is pushed. Note how this target uses the generated
|
||||
example/push/%: example/install/%
|
||||
@mkdir -p $(dir $@)
|
||||
$(info About to push $(SPEC) to a buildcache)
|
||||
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
|
||||
$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
|
||||
@touch $@
|
||||
|
||||
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
|
||||
|
||||
@@ -1263,6 +1263,11 @@ Git fetching supports the following parameters to ``version``:
|
||||
option ``--depth 1`` will be used if the version of git and the specified
|
||||
transport protocol support it, and ``--single-branch`` will be used if the
|
||||
version of git supports it.
|
||||
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
|
||||
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
|
||||
large repositories that have separate portions that can be built independently.
|
||||
If the provided paths are directories, all of their subdirectories and associated files
|
||||
will also be cloned.
|
||||
|
||||
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
|
||||
|
||||
@@ -1361,6 +1366,41 @@ Submodules
|
||||
For more information about git submodules see the manpage of git: ``man
|
||||
git-submodule``.
|
||||
|
||||
"""""""""""""""
Sparse-Checkout
"""""""""""""""
|
||||
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
|
||||
sparse-checkout feature. This will only clone the paths that are specified in the
|
||||
``git_sparse_paths`` attribute for the package, along with the files in the top-level directory.
|
||||
This feature allows you to only clone what you need from a large repository.
|
||||
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
|
||||
If ``git_sparse_paths`` is supplied and the git version is too old
|
||||
then a warning will be issued and that package will use the standard cloning operations instead.
|
||||
``git_sparse_paths`` should be supplied as a list of paths, as a callable that returns such a
|
||||
list for a given version, or as a more complex package attribute using the ``@property``
|
||||
decorator. A callable implementation of ``git_sparse_paths`` must return a list of paths.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def sparse_path_function(package):
|
||||
"""a callable function that can be used in side a version"""
|
||||
# paths can be directories or functions, all subdirectories and files are included
|
||||
paths = ["doe", "rae", "me/file.cpp"]
|
||||
if package.spec.version > Version("1.2.0"):
|
||||
paths.extend(["fae"])
|
||||
return paths
|
||||
|
||||
class MyPackage(Package):
|
||||
# can also be a package attribute that will be used if not specified in versions
|
||||
git_sparse_paths = ["doe", "rae"]
|
||||
|
||||
# use the package attribute
|
||||
version("1.0.0")
|
||||
version("1.1.0")
|
||||
# use the function
|
||||
version("1.1.5", git_sparse_paths=sparse_path_func)
|
||||
version("1.2.0", git_sparse_paths=sparse_path_func)
|
||||
version("1.2.5", git_sparse_paths=sparse_path_func)
|
||||
version("1.1.5", git_sparse_paths=sparse_path_func)
|
||||
|
||||
.. _github-fetch:
|
||||
|
||||
^^^^^^
|
||||
@@ -2344,10 +2384,10 @@ you set ``parallel`` to ``False`` at the package level, then each call
|
||||
to ``make()`` will be sequential by default, but packagers can call
|
||||
``make(parallel=True)`` to override it.
|
||||
|
||||
Note that the `--jobs` option works out of the box for all standard
|
||||
Note that the ``--jobs`` option works out of the box for all standard
|
||||
build systems. If you are using a non-standard build system instead, you
|
||||
can use the variable `make_jobs` to extract the number of jobs specified
|
||||
by the `--jobs` option:
|
||||
can use the variable ``make_jobs`` to extract the number of jobs specified
|
||||
by the ``--jobs`` option:
|
||||
|
||||
.. code-block:: python
|
||||
:emphasize-lines: 7, 11
|
||||
@@ -5589,7 +5629,8 @@ compiler configuration. This is accomplished by setting the package's
|
||||
Setting the property to ``True`` ensures access to the compiler through
|
||||
canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
|
||||
It also gives access to build dependencies like ``cmake`` through their
|
||||
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake``).
|
||||
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
|
||||
path or ``self.spec["cmake"].command`` for the ``Executable`` instance).
|
||||
|
||||
Be sure to add the property at the top of the package class under other
|
||||
properties like the ``homepage``.
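As a sketch, assuming a package that depends on ``cmake`` and has this property enabled, both accessors named above can be used inside an ``install`` method (the recipe below is illustrative, not taken from an existing package):

.. code-block:: python

   def install(self, spec, prefix):
       # Path to the cmake executable provided by the build dependency
       cmake_path = self.spec["cmake"].prefix.bin.cmake
       # Executable object that can be invoked directly
       cmake = self.spec["cmake"].command
       cmake("--version")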
|
||||
|
||||
@@ -253,17 +253,6 @@ can easily happen if it is not updated frequently, this behavior ensures that
|
||||
spack has a way to know for certain about the status of any concrete spec on
|
||||
the remote mirror, but can slow down pipeline generation significantly.
|
||||
|
||||
The ``--optimize`` argument is experimental and runs the generated pipeline
|
||||
document through a series of optimization passes designed to reduce the size
|
||||
of the generated file.
|
||||
|
||||
The ``--dependencies`` is also experimental and disables what in Gitlab is
|
||||
referred to as DAG scheduling, internally using the ``dependencies`` keyword
|
||||
rather than ``needs`` to list dependency jobs. The drawback of using this option
|
||||
is that before any job can begin, all jobs in previous stages must first
|
||||
complete. The benefit is that Gitlab allows more dependencies to be listed
|
||||
when using ``dependencies`` instead of ``needs``.
|
||||
|
||||
The optional ``--output-file`` argument should be an absolute path (including
|
||||
file name) to the generated pipeline, and if not given, the default is
|
||||
``./.gitlab-ci.yml``.
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
sphinx==7.2.6
|
||||
sphinx==7.4.7
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_design==0.6.0
|
||||
sphinx_design==0.6.1
|
||||
sphinx-rtd-theme==2.0.0
|
||||
python-levenshtein==0.25.1
|
||||
docutils==0.20.1
|
||||
pygments==2.18.0
|
||||
urllib3==2.2.2
|
||||
pytest==8.2.2
|
||||
pytest==8.3.2
|
||||
isort==5.13.2
|
||||
black==24.4.2
|
||||
flake8==7.1.0
|
||||
mypy==1.10.0
|
||||
black==24.8.0
|
||||
flake8==7.1.1
|
||||
mypy==1.11.1
|
||||
|
||||
96
lib/spack/env/cc
vendored
@@ -174,6 +174,46 @@ preextend() {
|
||||
unset IFS
|
||||
}
|
||||
|
||||
execute() {
|
||||
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
|
||||
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
|
||||
case "$SPACK_TEST_COMMAND" in
|
||||
dump-args)
|
||||
IFS="$lsep"
|
||||
for arg in $full_command_list; do
|
||||
echo "$arg"
|
||||
done
|
||||
unset IFS
|
||||
exit
|
||||
;;
|
||||
dump-env-*)
|
||||
var=${SPACK_TEST_COMMAND#dump-env-}
|
||||
eval "printf '%s\n' \"\$0: \$var: \$$var\""
|
||||
;;
|
||||
*)
|
||||
die "Unknown test command: '$SPACK_TEST_COMMAND'"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
#
|
||||
# Write the input and output commands to debug logs if it's asked for.
|
||||
#
|
||||
if [ "$SPACK_DEBUG" = TRUE ]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
IFS="$lsep"
|
||||
echo "[$mode] "$full_command_list >> "$output_log"
|
||||
unset IFS
|
||||
fi
|
||||
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
# to the alarm bell separator.
|
||||
IFS="$lsep"; exec $full_command_list
|
||||
exit
|
||||
}
|
||||
|
||||
# Fail with a clear message if the input contains any bell characters.
|
||||
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
|
||||
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
|
||||
@@ -231,12 +271,17 @@ fi
|
||||
# ld link
|
||||
# ccld compile & link
|
||||
|
||||
# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
|
||||
# including version checks (SPACK_XFLAGS variants are not applied
|
||||
# for version checks).
|
||||
command="${0##*/}"
|
||||
comp="CC"
|
||||
vcheck_flags=""
|
||||
case "$command" in
|
||||
cpp)
|
||||
mode=cpp
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
|
||||
command="$SPACK_CC"
|
||||
@@ -244,6 +289,7 @@ case "$command" in
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
|
||||
command="$SPACK_CXX"
|
||||
@@ -251,6 +297,7 @@ case "$command" in
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
|
||||
command="$SPACK_FC"
|
||||
@@ -258,6 +305,7 @@ case "$command" in
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
|
||||
;;
|
||||
f77|xlf|xlf_r|pgf77)
|
||||
command="$SPACK_F77"
|
||||
@@ -265,6 +313,7 @@ case "$command" in
|
||||
comp="F77"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
|
||||
;;
|
||||
ld|ld.gold|ld.lld)
|
||||
mode=ld
|
||||
@@ -365,7 +414,11 @@ unset IFS
|
||||
export PATH="$new_dirs"
|
||||
|
||||
if [ "$mode" = vcheck ]; then
|
||||
exec "${command}" "$@"
|
||||
full_command_list="$command"
|
||||
args="$@"
|
||||
extend full_command_list vcheck_flags
|
||||
extend full_command_list args
|
||||
execute
|
||||
fi
|
||||
|
||||
# Darwin's linker has a -r argument that merges object files together.
|
||||
@@ -722,6 +775,7 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
F)
|
||||
extend spack_flags_list SPACK_ALWAYS_FFLAGS
|
||||
extend spack_flags_list SPACK_FFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -731,6 +785,7 @@ esac
|
||||
# C preprocessor flags come before any C/CXX flags
|
||||
case "$mode" in
|
||||
cpp|as|cc|ccld)
|
||||
extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
|
||||
extend spack_flags_list SPACK_CPPFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -741,9 +796,11 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
C)
|
||||
extend spack_flags_list SPACK_ALWAYS_CFLAGS
|
||||
extend spack_flags_list SPACK_CFLAGS
|
||||
;;
|
||||
CXX)
|
||||
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
|
||||
extend spack_flags_list SPACK_CXXFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -933,39 +990,4 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
|
||||
esac
|
||||
fi
|
||||
|
||||
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
|
||||
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
|
||||
case "$SPACK_TEST_COMMAND" in
|
||||
dump-args)
|
||||
IFS="$lsep"
|
||||
for arg in $full_command_list; do
|
||||
echo "$arg"
|
||||
done
|
||||
unset IFS
|
||||
exit
|
||||
;;
|
||||
dump-env-*)
|
||||
var=${SPACK_TEST_COMMAND#dump-env-}
|
||||
eval "printf '%s\n' \"\$0: \$var: \$$var\""
|
||||
;;
|
||||
*)
|
||||
die "Unknown test command: '$SPACK_TEST_COMMAND'"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
#
|
||||
# Write the input and output commands to debug logs if it's asked for.
|
||||
#
|
||||
if [ "$SPACK_DEBUG" = TRUE ]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
IFS="$lsep"
|
||||
echo "[$mode] "$full_command_list >> "$output_log"
|
||||
unset IFS
|
||||
fi
|
||||
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
# to the alarm bell separator.
|
||||
IFS="$lsep"; exec $full_command_list
|
||||
execute
|
||||
|
||||
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
|
||||
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
||||
12
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -47,7 +47,11 @@ def decorator(factory):
|
||||
|
||||
|
||||
def partial_uarch(
|
||||
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
|
||||
name: str = "",
|
||||
vendor: str = "",
|
||||
features: Optional[Set[str]] = None,
|
||||
generation: int = 0,
|
||||
cpu_part: str = "",
|
||||
) -> Microarchitecture:
|
||||
"""Construct a partial microarchitecture, from information gathered during system scan."""
|
||||
return Microarchitecture(
|
||||
@@ -57,6 +61,7 @@ def partial_uarch(
|
||||
features=features or set(),
|
||||
compilers={},
|
||||
generation=generation,
|
||||
cpu_part=cpu_part,
|
||||
)
|
||||
|
||||
|
||||
@@ -90,6 +95,7 @@ def proc_cpuinfo() -> Microarchitecture:
|
||||
return partial_uarch(
|
||||
vendor=_canonicalize_aarch64_vendor(data),
|
||||
features=_feature_set(data, key="Features"),
|
||||
cpu_part=data.get("CPU part", ""),
|
||||
)
|
||||
|
||||
if architecture in (PPC64LE, PPC64):
|
||||
@@ -345,6 +351,10 @@ def sorting_fn(item):
|
||||
generic_candidates = [c for c in candidates if c.vendor == "generic"]
|
||||
best_generic = max(generic_candidates, key=sorting_fn)
|
||||
|
||||
# Relevant for AArch64. Filter on "cpu_part" if we have any match
|
||||
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
|
||||
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
|
||||
|
||||
# Filter the candidates to be descendant of the best generic candidate.
|
||||
# This is to avoid that the lack of a niche feature that can be disabled
|
||||
# from e.g. BIOS prevents detection of a reasonably performant architecture
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Types and functions to manage information
|
||||
on CPU microarchitectures.
|
||||
"""
|
||||
"""Types and functions to manage information on CPU microarchitectures."""
|
||||
import functools
|
||||
import platform
|
||||
import re
|
||||
@@ -65,21 +63,24 @@ class Microarchitecture:
|
||||
passed in as argument above.
|
||||
* versions: versions that support this micro-architecture.
|
||||
|
||||
generation (int): generation of the micro-architecture, if
|
||||
relevant.
|
||||
generation (int): generation of the micro-architecture, if relevant.
|
||||
cpu_part (str): cpu part of the architecture, if relevant.
|
||||
"""
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
# pylint: disable=too-many-arguments,too-many-instance-attributes
|
||||
#: Aliases for micro-architecture's features
|
||||
feature_aliases = FEATURE_ALIASES
|
||||
|
||||
def __init__(self, name, parents, vendor, features, compilers, generation=0):
|
||||
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
|
||||
self.name = name
|
||||
self.parents = parents
|
||||
self.vendor = vendor
|
||||
self.features = features
|
||||
self.compilers = compilers
|
||||
# Only relevant for PowerPC
|
||||
self.generation = generation
|
||||
# Only relevant for AArch64
|
||||
self.cpu_part = cpu_part
|
||||
# Cache the ancestor computation
|
||||
self._ancestors = None
|
||||
|
||||
@@ -111,6 +112,7 @@ def __eq__(self, other):
|
||||
and self.parents == other.parents # avoid ancestors here
|
||||
and self.compilers == other.compilers
|
||||
and self.generation == other.generation
|
||||
and self.cpu_part == other.cpu_part
|
||||
)
|
||||
|
||||
@coerce_target_names
|
||||
@@ -143,7 +145,8 @@ def __repr__(self):
|
||||
cls_name = self.__class__.__name__
|
||||
fmt = (
|
||||
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
|
||||
"{0.features!r}, {0.compilers!r}, {0.generation!r})"
|
||||
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
|
||||
"cpu_part={0.cpu_part!r})"
|
||||
)
|
||||
return fmt.format(self)
|
||||
|
||||
@@ -190,6 +193,7 @@ def to_dict(self):
|
||||
"generation": self.generation,
|
||||
"parents": [str(x) for x in self.parents],
|
||||
"compilers": self.compilers,
|
||||
"cpupart": self.cpu_part,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@@ -202,6 +206,7 @@ def from_dict(data) -> "Microarchitecture":
|
||||
features=set(data["features"]),
|
||||
compilers=data.get("compilers", {}),
|
||||
generation=data.get("generation", 0),
|
||||
cpu_part=data.get("cpupart", ""),
|
||||
)
|
||||
|
||||
def optimization_flags(self, compiler, version):
|
||||
@@ -360,8 +365,11 @@ def fill_target_from_dict(name, data, targets):
|
||||
features = set(values["features"])
|
||||
compilers = values.get("compilers", {})
|
||||
generation = values.get("generation", 0)
|
||||
cpu_part = values.get("cpupart", "")
|
||||
|
||||
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
|
||||
targets[name] = Microarchitecture(
|
||||
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
|
||||
)
|
||||
|
||||
known_targets = {}
|
||||
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
|
||||
|
||||
@@ -2225,10 +2225,14 @@
|
||||
],
|
||||
"nvhpc": [
|
||||
{
|
||||
"versions": "21.11:",
|
||||
"versions": "21.11:23.8",
|
||||
"name": "zen3",
|
||||
"flags": "-tp {name}",
|
||||
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
|
||||
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
|
||||
},
|
||||
{
|
||||
"versions": "23.9:",
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -2711,7 +2715,8 @@
|
||||
"flags": "-mcpu=thunderx2t99"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0x0af"
|
||||
},
|
||||
"a64fx": {
|
||||
"from": ["armv8.2a"],
|
||||
@@ -2779,7 +2784,8 @@
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0x001"
|
||||
},
|
||||
"cortex_a72": {
|
||||
"from": ["aarch64"],
|
||||
@@ -2816,7 +2822,8 @@
|
||||
"flags" : "-mcpu=cortex-a72"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0xd08"
|
||||
},
|
||||
"neoverse_n1": {
|
||||
"from": ["cortex_a72", "armv8.2a"],
|
||||
@@ -2902,7 +2909,8 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0xd0c"
|
||||
},
|
||||
"neoverse_v1": {
|
||||
"from": ["neoverse_n1", "armv8.4a"],
|
||||
@@ -2926,8 +2934,6 @@
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
@@ -3028,7 +3034,8 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0xd40"
|
||||
},
|
||||
"neoverse_v2": {
|
||||
"from": ["neoverse_n1", "armv9.0a"],
|
||||
@@ -3052,13 +3059,10 @@
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
@@ -3066,18 +3070,12 @@
|
||||
"sb",
|
||||
"dcpodp",
|
||||
"sve2",
|
||||
"sveaes",
|
||||
"svepmull",
|
||||
"svebitperm",
|
||||
"svesha3",
|
||||
"svesm4",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16",
|
||||
"dgh"
|
||||
"bf16"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
@@ -3102,15 +3100,19 @@
|
||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:11.99",
|
||||
"versions": "10.0:11.3.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||
},
|
||||
{
|
||||
"versions": "11.4:11.99",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
},
|
||||
{
|
||||
"versions": "12.0:12.99",
|
||||
"versions": "12.0:12.2.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"versions": "12.3:",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
}
|
||||
],
|
||||
@@ -3145,7 +3147,113 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0xd4f"
|
||||
},
|
||||
"neoverse_n2": {
|
||||
"from": ["neoverse_n1", "armv9.0a"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"dcpodp",
|
||||
"sve2",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "4.8:5.99",
|
||||
"flags": "-march=armv8-a"
|
||||
},
|
||||
{
|
||||
"versions": "6:6.99",
|
||||
"flags" : "-march=armv8.1-a"
|
||||
},
|
||||
{
|
||||
"versions": "7.0:7.99",
|
||||
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "8.0:8.99",
|
||||
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "9.0:9.99",
|
||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:10.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||
},
|
||||
{
|
||||
"versions": "11.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:10.99",
|
||||
"flags" : "-march=armv8.5-a+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11.0:13.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "14.0:15.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
{
|
||||
"versions": "23.04.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"nvhpc" : [
|
||||
{
|
||||
"versions": "23.3:",
|
||||
"name": "neoverse-n1",
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd49"
|
||||
},
|
||||
"m1": {
|
||||
"from": ["armv8.4a"],
|
||||
@@ -3211,7 +3319,8 @@
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0x022"
|
||||
},
|
||||
"m2": {
|
||||
"from": ["m1", "armv8.5a"],
|
||||
@@ -3289,7 +3398,8 @@
|
||||
"flags" : "-mcpu=apple-m2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"cpupart": "0x032"
|
||||
},
|
||||
"arm": {
|
||||
"from": [],
|
||||
|
||||
@@ -52,6 +52,9 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"cpupart": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@@ -107,4 +110,4 @@
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1624,6 +1624,12 @@ def remove_linked_tree(path):
|
||||
shutil.rmtree(os.path.realpath(path), **kwargs)
|
||||
os.unlink(path)
|
||||
else:
|
||||
if sys.platform == "win32":
|
||||
# Adding this prefix allows shutil to remove long paths on windows
|
||||
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
|
||||
long_path_pfx = "\\\\?\\"
|
||||
if not path.startswith(long_path_pfx):
|
||||
path = long_path_pfx + path
|
||||
shutil.rmtree(path, **kwargs)
|
||||
|
||||
|
||||
|
||||
@@ -33,8 +33,23 @@
|
||||
pass
|
||||
|
||||
|
||||
esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
|
||||
# Ansi Control Sequence Introducers (CSI) are a well-defined format
|
||||
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
|
||||
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
|
||||
csi_pre = f"{esc}{lbracket}"
|
||||
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
|
||||
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
|
||||
# General ansi escape sequences have well-defined prefixes,
|
||||
# but content and suffixes are less reliable.
|
||||
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
|
||||
# with no intervening "<ESC>"/"<BELL>" keys or newlines
|
||||
esc_pre = f"{esc}[@-_]"
|
||||
esc_content = f"[^{esc}{bell}{newline}]"
|
||||
esc_post = f"(?:{esc}{bslash}|{bell})"
|
||||
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
|
||||
# Use this to strip escape sequences
|
||||
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
|
||||
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
|
||||
|
||||
# control characters for enabling/disabling echo
|
||||
#
|
||||
|
||||
@@ -351,6 +351,22 @@ def _wrongly_named_spec(error_cls):
|
||||
return errors
|
||||
|
||||
|
||||
@config_packages
|
||||
def _ensure_all_virtual_packages_have_default_providers(error_cls):
|
||||
"""All virtual packages must have a default provider explicitly set."""
|
||||
configuration = spack.config.create()
|
||||
defaults = configuration.get("packages", scope="defaults")
|
||||
default_providers = defaults["all"]["providers"]
|
||||
virtuals = spack.repo.PATH.provider_index.providers
|
||||
default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
|
||||
|
||||
return [
|
||||
error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
|
||||
for virtual in virtuals
|
||||
if virtual not in default_providers
|
||||
]
|
||||
|
||||
|
||||
def _make_config_error(config_data, summary, error_cls):
|
||||
s = io.StringIO()
|
||||
s.write("Occurring in the following file:\n")
|
||||
@@ -791,7 +807,7 @@ def check_virtual_with_variants(spec, msg):
|
||||
return
|
||||
error = error_cls(
|
||||
f"{pkg_name}: {msg}",
|
||||
f"remove variants from '{spec}' in depends_on directive in {filename}",
|
||||
[f"remove variants from '{spec}' in depends_on directive in {filename}"],
|
||||
)
|
||||
errors.append(error)
|
||||
|
||||
|
||||
File diff suppressed because it is too large
154
lib/spack/spack/bootstrap/clingo.py
Normal file
@@ -0,0 +1,154 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Bootstrap concrete specs for clingo
|
||||
|
||||
Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
|
||||
we need to rely on another mechanism to get a concrete spec that fits the current host.
|
||||
|
||||
This module contains the logic to get a concrete spec for clingo, starting from a prototype
|
||||
JSON file for a similar platform.
|
||||
"""
|
||||
import pathlib
|
||||
import sys
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.traverse
|
||||
|
||||
from .config import spec_for_current_python
|
||||
|
||||
|
||||
class ClingoBootstrapConcretizer:
|
||||
def __init__(self, configuration):
|
||||
self.host_platform = spack.platforms.host()
|
||||
self.host_os = self.host_platform.operating_system("frontend")
|
||||
self.host_target = archspec.cpu.host().family
|
||||
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
|
||||
self.host_architecture.target = str(self.host_target)
|
||||
self.host_compiler = self._valid_compiler_or_raise()
|
||||
self.host_python = self.python_external_spec()
|
||||
if str(self.host_platform) == "linux":
|
||||
self.host_libc = self.libc_external_spec()
|
||||
|
||||
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
|
||||
|
||||
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
|
||||
if str(self.host_platform) == "linux":
|
||||
compiler_name = "gcc"
|
||||
elif str(self.host_platform) == "darwin":
|
||||
compiler_name = "apple-clang"
|
||||
elif str(self.host_platform) == "windows":
|
||||
compiler_name = "msvc"
|
||||
elif str(self.host_platform) == "freebsd":
|
||||
compiler_name = "clang"
|
||||
else:
|
||||
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
|
||||
candidates = spack.compilers.compilers_for_spec(
|
||||
compiler_name, arch_spec=self.host_architecture
|
||||
)
|
||||
if not candidates:
|
||||
raise RuntimeError(
|
||||
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
|
||||
)
|
||||
candidates.sort(key=lambda x: x.spec.version, reverse=True)
|
||||
return candidates[0]
|
||||
|
||||
def _externals_from_yaml(
|
||||
self, configuration: "spack.config.Configuration"
|
||||
) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
|
||||
packages_yaml = configuration.get("packages")
|
||||
requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
|
||||
selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
|
||||
for pkg_name in ["cmake", "bison"]:
|
||||
if pkg_name not in packages_yaml:
|
||||
continue
|
||||
|
||||
candidates = packages_yaml[pkg_name].get("externals", [])
|
||||
for candidate in candidates:
|
||||
s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
|
||||
if not s.satisfies(requirements[pkg_name]):
|
||||
continue
|
||||
|
||||
if not s.intersects(f"%{self.host_compiler.spec}"):
|
||||
continue
|
||||
|
||||
if not s.intersects(f"arch={self.host_architecture}"):
|
||||
continue
|
||||
|
||||
selected[pkg_name] = self._external_spec(s)
|
||||
break
|
||||
return selected["cmake"], selected["bison"]
|
||||
|
||||
def prototype_path(self) -> pathlib.Path:
|
||||
"""Path to a prototype concrete specfile for clingo"""
|
||||
parent_dir = pathlib.Path(__file__).parent
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
|
||||
if str(self.host_platform) == "linux":
|
||||
# Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
|
||||
if not result.exists():
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"
|
||||
|
||||
elif str(self.host_platform) == "freebsd":
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"
|
||||
|
||||
elif not result.exists():
|
||||
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
|
||||
|
||||
return result
|
||||
|
||||
def concretize(self) -> "spack.spec.Spec":
|
||||
# Read the prototype and mark it NOT concrete
|
||||
s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
|
||||
s._mark_concrete(False)
|
||||
|
||||
# Tweak it to conform to the host architecture
|
||||
for node in s.traverse():
|
||||
node.architecture.os = str(self.host_os)
|
||||
node.compiler = self.host_compiler.spec
|
||||
node.architecture = self.host_architecture
|
||||
|
||||
if node.name == "gcc-runtime":
|
||||
node.versions = self.host_compiler.spec.versions
|
||||
|
||||
for edge in spack.traverse.traverse_edges([s], cover="edges"):
|
||||
if edge.spec.name == "python":
|
||||
edge.spec = self.host_python
|
||||
|
||||
if edge.spec.name == "bison" and self.external_bison:
|
||||
edge.spec = self.external_bison
|
||||
|
||||
if edge.spec.name == "cmake" and self.external_cmake:
|
||||
edge.spec = self.external_cmake
|
||||
|
||||
if "libc" in edge.virtuals:
|
||||
edge.spec = self.host_libc
|
||||
|
||||
s._finalize_concretization()
|
||||
|
||||
# Work around the fact that the installer calls Spec.dependents() and
|
||||
# we modified edges inconsistently
|
||||
return s.copy()
|
||||
|
||||
def python_external_spec(self) -> "spack.spec.Spec":
|
||||
"""Python external spec corresponding to the current running interpreter"""
|
||||
result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
|
||||
return self._external_spec(result)
|
||||
|
||||
def libc_external_spec(self) -> "spack.spec.Spec":
|
||||
result = self.host_compiler.default_libc
|
||||
return self._external_spec(result)
|
||||
|
||||
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
|
||||
initial_spec.namespace = "builtin"
|
||||
initial_spec.compiler = self.host_compiler.spec
|
||||
initial_spec.architecture = self.host_architecture
|
||||
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
|
||||
initial_spec.compiler_flags[flag_type] = []
|
||||
return spack.spec.parse_with_version_concrete(initial_spec)
|
||||
@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
|
||||
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
|
||||
for name, path in configuration_paths:
|
||||
platform = spack.platforms.host().name
|
||||
platform_scope = spack.config.ConfigScope(
|
||||
"/".join([name, platform]), os.path.join(path, platform)
|
||||
platform_scope = spack.config.DirectoryConfigScope(
|
||||
f"{name}/{platform}", os.path.join(path, platform)
|
||||
)
|
||||
generic_scope = spack.config.ConfigScope(name, path)
|
||||
generic_scope = spack.config.DirectoryConfigScope(name, path)
|
||||
config_scopes.extend([generic_scope, platform_scope])
|
||||
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
|
||||
tty.debug(msg.format(generic_scope.name, generic_scope.path))
|
||||
|
||||
@@ -54,6 +54,7 @@
|
||||
import spack.version
|
||||
|
||||
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
|
||||
from .clingo import ClingoBootstrapConcretizer
|
||||
from .config import spack_python_interpreter, spec_for_current_python
|
||||
|
||||
#: Name of the file containing metadata about the bootstrapping source
|
||||
@@ -268,15 +269,13 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
|
||||
# Try to build and install from sources
|
||||
with spack_python_interpreter():
|
||||
# Add hint to use frontend operating system on Cray
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
|
||||
|
||||
if module == "clingo":
|
||||
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
|
||||
concrete_spec._old_concretize( # pylint: disable=protected-access
|
||||
deprecation_warning=False
|
||||
)
|
||||
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
|
||||
concrete_spec = bootstrapper.concretize()
|
||||
else:
|
||||
concrete_spec = spack.spec.Spec(
|
||||
abstract_spec_str + " ^" + spec_for_current_python()
|
||||
)
|
||||
concrete_spec.concretize()
|
||||
|
||||
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
|
||||
@@ -303,14 +302,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
|
||||
# might reduce compilation time by a fair amount
|
||||
_add_externals_if_missing()
|
||||
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str)
|
||||
if concrete_spec.name == "patchelf":
|
||||
concrete_spec._old_concretize( # pylint: disable=protected-access
|
||||
deprecation_warning=False
|
||||
)
|
||||
else:
|
||||
concrete_spec.concretize()
|
||||
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
|
||||
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
|
||||
tty.debug(msg.format(abstract_spec_str))
|
||||
with spack.config.override(self.mirror_scope):
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -457,9 +457,12 @@ def set_wrapper_variables(pkg, env):
|
||||
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
|
||||
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
|
||||
|
||||
# Find ccache binary and hand it to build environment
|
||||
if spack.config.get("config:ccache"):
|
||||
# Enable ccache in the compiler wrapper
|
||||
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
|
||||
else:
|
||||
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
|
||||
env.set("CCACHE_DISABLE", "1")
|
||||
|
||||
# Gather information about various types of dependencies
|
||||
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
||||
@@ -1473,7 +1476,7 @@ def long_message(self):
|
||||
out.write(" {0}\n".format(self.log_name))
|
||||
|
||||
# Also output the test log path IF it exists
|
||||
if self.context != "test":
|
||||
if self.context != "test" and have_log:
|
||||
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
|
||||
if os.path.isfile(test_log):
|
||||
out.write("\nSee test log for details:\n")
|
||||
|
||||
@@ -124,6 +124,8 @@ def cuda_flags(arch_list):
|
||||
# minimum supported versions
|
||||
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
|
||||
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
|
||||
conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
|
||||
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
|
||||
|
||||
# maximum supported version
|
||||
# NOTE:
|
||||
@@ -136,14 +138,14 @@ def cuda_flags(arch_list):
|
||||
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
|
||||
conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
|
||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
|
||||
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
|
||||
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.4")
|
||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||
@@ -211,12 +213,16 @@ def cuda_flags(arch_list):
|
||||
conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
|
||||
conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
|
||||
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
|
||||
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
|
||||
|
||||
# XL is mostly relevant for ppc64le Linux
|
||||
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
|
||||
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
|
||||
conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
|
||||
|
||||
# PowerPC.
|
||||
conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")
|
||||
|
||||
# Darwin.
|
||||
# TODO: add missing conflicts for %apple-clang cuda@:10
|
||||
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")
|
||||
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
|
||||
|
||||
@@ -72,7 +72,7 @@ def build_directory(self):
|
||||
def build_args(self):
|
||||
"""Arguments for ``go build``."""
|
||||
# Pass ldflags -s = --strip-all and -w = --no-warnings by default
|
||||
return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
|
||||
return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
|
||||
|
||||
@property
|
||||
def check_args(self):
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Common utilities for managing intel oneapi packages."""
|
||||
import getpass
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
@@ -13,6 +12,7 @@
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
|
||||
from llnl.util.link_tree import LinkTree
|
||||
|
||||
import spack.util.path
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.directives import conflicts, license, redistribute, variant
|
||||
from spack.package_base import InstallError
|
||||
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
|
||||
# with other install depends on the userid. For root, we
|
||||
# delete the installercache before and after install. For
|
||||
# non root we redefine the HOME environment variable.
|
||||
if getpass.getuser() == "root":
|
||||
if spack.util.path.get_user() == "root":
|
||||
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
|
||||
|
||||
bash = Executable("bash")
|
||||
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
|
||||
self.prefix,
|
||||
)
|
||||
|
||||
if getpass.getuser() == "root":
|
||||
if spack.util.path.get_user() == "root":
|
||||
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
|
||||
|
||||
# Some installers have a bug and do not return an error code when failing
|
||||
|
||||
@@ -139,6 +139,10 @@ def configure(self, pkg, spec, prefix):
|
||||
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
|
||||
args.extend(self.configure_args())
|
||||
|
||||
# https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
|
||||
if spec["py-sip"].satisfies("@6.1.0:"):
|
||||
args.extend(["--scripts-dir", pkg.prefix.bin])
|
||||
|
||||
sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
|
||||
sip_build(*args)
|
||||
|
||||
|
||||
@@ -34,6 +34,8 @@ def _misc_cache():
|
||||
return spack.util.file_cache.FileCache(path)
|
||||
|
||||
|
||||
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
|
||||
|
||||
#: Spack's cache for small data
|
||||
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
|
||||
llnl.util.lang.Singleton(_misc_cache)
|
||||
|
||||
@@ -22,6 +22,8 @@
|
||||
from urllib.parse import urlencode
|
||||
from urllib.request import HTTPHandler, Request, build_opener
|
||||
|
||||
import ruamel.yaml
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
@@ -36,6 +38,7 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.git
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -69,7 +72,7 @@
|
||||
# TODO: Remove this in Spack 0.23
|
||||
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
|
||||
JOB_NAME_FORMAT = (
|
||||
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
|
||||
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
|
||||
)
|
||||
IS_WINDOWS = sys.platform == "win32"
|
||||
spack_gpg = spack.main.SpackCommand("gpg")
|
||||
@@ -551,10 +554,9 @@ def generate_gitlab_ci_yaml(
|
||||
env,
|
||||
print_summary,
|
||||
output_file,
|
||||
*,
|
||||
prune_dag=False,
|
||||
check_index_only=False,
|
||||
run_optimizer=False,
|
||||
use_dependencies=False,
|
||||
artifacts_root=None,
|
||||
remote_mirror_override=None,
|
||||
):
|
||||
@@ -575,12 +577,6 @@ def generate_gitlab_ci_yaml(
|
||||
this mode results in faster yaml generation time). Otherwise, also
|
||||
check each spec directly by url (useful if there is no index or it
|
||||
might be out of date).
|
||||
run_optimizer (bool): If True, post-process the generated yaml to try
|
||||
try to reduce the size (attempts to collect repeated configuration
|
||||
and replace with definitions).)
|
||||
use_dependencies (bool): If true, use "dependencies" rather than "needs"
|
||||
("needs" allows DAG scheduling). Useful if gitlab instance cannot
|
||||
be configured to handle more than a few "needs" per job.
|
||||
artifacts_root (str): Path where artifacts like logs, environment
|
||||
files (spack.yaml, spack.lock), etc should be written. GitLab
|
||||
requires this to be within the project directory.
|
||||
@@ -814,7 +810,8 @@ def ensure_expected_target_path(path):
|
||||
cli_scopes = [
|
||||
os.path.relpath(s.path, concrete_env_dir)
|
||||
for s in cfg.scopes().values()
|
||||
if isinstance(s, cfg.ImmutableConfigScope)
|
||||
if not s.writable
|
||||
and isinstance(s, (cfg.DirectoryConfigScope))
|
||||
and s.path not in env_includes
|
||||
and os.path.exists(s.path)
|
||||
]
|
||||
@@ -1111,7 +1108,7 @@ def main_script_replacements(cmd):
|
||||
if cdash_handler and cdash_handler.auth_token:
|
||||
try:
|
||||
cdash_handler.populate_buildgroup(all_job_names)
|
||||
except (SpackError, HTTPError, URLError) as err:
|
||||
except (SpackError, HTTPError, URLError, TimeoutError) as err:
|
||||
tty.warn(f"Problem populating buildgroup: {err}")
|
||||
else:
|
||||
tty.warn("Unable to populate buildgroup without CDash credentials")
|
||||
@@ -1271,17 +1268,6 @@ def main_script_replacements(cmd):
|
||||
with open(copy_specs_file, "w") as fd:
|
||||
fd.write(json.dumps(buildcache_copies))
|
||||
|
||||
# TODO(opadron): remove this or refactor
|
||||
if run_optimizer:
|
||||
import spack.ci_optimization as ci_opt
|
||||
|
||||
output_object = ci_opt.optimizer(output_object)
|
||||
|
||||
# TODO(opadron): remove this or refactor
|
||||
if use_dependencies:
|
||||
import spack.ci_needs_workaround as cinw
|
||||
|
||||
output_object = cinw.needs_to_dependencies(output_object)
|
||||
else:
|
||||
# No jobs were generated
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
@@ -1310,8 +1296,11 @@ def main_script_replacements(cmd):
|
||||
if not rebuild_everything:
|
||||
sys.exit(1)
|
||||
|
||||
with open(output_file, "w") as outf:
|
||||
outf.write(syaml.dump(sorted_output, default_flow_style=True))
|
||||
# Minimize yaml output size through use of anchors
|
||||
syaml.anchorify(sorted_output)
|
||||
|
||||
with open(output_file, "w") as f:
|
||||
ruamel.yaml.YAML().dump(sorted_output, f)
|
||||
|
||||
|
||||
def _url_encode_string(input_string):
|
||||
@@ -1382,15 +1371,6 @@ def can_verify_binaries():
|
||||
return len(gpg_util.public_keys()) >= 1
|
||||
|
||||
|
||||
def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
|
||||
"""Unchecked version of the public API, for easier mocking"""
|
||||
bindist.push_or_raise(
|
||||
spec,
|
||||
spack.mirror.Mirror.from_url(mirror_url).push_url,
|
||||
bindist.PushOptions(force=True, unsigned=not sign_binaries),
|
||||
)
|
||||
|
||||
|
||||
def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
|
||||
"""Push one or more binary packages to the mirror.
|
||||
|
||||
@@ -1401,20 +1381,13 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
|
||||
sign_binaries: If True, spack will attempt to sign binary package before pushing.
|
||||
"""
|
||||
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
|
||||
signing_key = bindist.select_signing_key() if sign_binaries else None
|
||||
try:
|
||||
_push_to_build_cache(spec, sign_binaries, mirror_url)
|
||||
bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
|
||||
return True
|
||||
except bindist.PushToBuildCacheError as e:
|
||||
tty.error(str(e))
|
||||
tty.error(f"Problem writing to {mirror_url}: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
# TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
|
||||
# exception instead of parsing str(e)...
|
||||
msg = str(e)
|
||||
if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
|
||||
tty.error(f"Permission problem writing to {mirror_url}: {msg}")
|
||||
return False
|
||||
raise
|
||||
|
||||
|
||||
def remove_other_mirrors(mirrors_to_keep, scope=None):
|
||||
@@ -1460,10 +1433,6 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
|
||||
job_log_dir: path into which build log should be copied
|
||||
"""
|
||||
tty.debug(f"job spec: {job_spec}")
|
||||
if not job_spec:
|
||||
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
|
||||
tty.error(msg)
|
||||
return
|
||||
|
||||
try:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
|
||||
@@ -2095,7 +2064,7 @@ def read_broken_spec(broken_spec_url):
|
||||
"""
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(broken_spec_url)
|
||||
except (URLError, web_util.SpackWebError, HTTPError):
|
||||
except web_util.SpackWebError:
|
||||
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
|
||||
return None
|
||||
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections.abc
|
||||
|
||||
get_job_name = lambda needs_entry: (
|
||||
needs_entry.get("job")
|
||||
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
|
||||
else needs_entry if isinstance(needs_entry, str) else None
|
||||
)
|
||||
|
||||
|
||||
def convert_job(job_entry):
|
||||
if not isinstance(job_entry, collections.abc.Mapping):
|
||||
return job_entry
|
||||
|
||||
needs = job_entry.get("needs")
|
||||
if needs is None:
|
||||
return job_entry
|
||||
|
||||
new_job = {}
|
||||
new_job.update(job_entry)
|
||||
del new_job["needs"]
|
||||
|
||||
new_job["dependencies"] = list(
|
||||
filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
|
||||
)
|
||||
|
||||
return new_job
|
||||
|
||||
|
||||
def needs_to_dependencies(yaml):
|
||||
return dict((k, convert_job(v)) for k, v in yaml.items())
|
||||
@@ -1,363 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib

import spack.util.spack_yaml as syaml


def sort_yaml_obj(obj):
if isinstance(obj, collections.abc.Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
)

if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)

return obj


def matches(obj, proto):
"""Returns True if the test object "obj" matches the prototype object
"proto".

If obj and proto are mappings, obj matches proto if (key in obj) and
(obj[key] matches proto[key]) for every key in proto.

If obj and proto are sequences, obj matches proto if they are of the same
length and (a matches b) for every (a,b) in zip(obj, proto).

Otherwise, obj matches proto if obj == proto.

Precondition: proto must not have any reference cycles
"""
if isinstance(obj, collections.abc.Mapping):
if not isinstance(proto, collections.abc.Mapping):
return False

return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
return False

if len(obj) != len(proto):
return False

return all(matches(obj[index], val) for index, val in enumerate(proto))

return obj == proto
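# Illustrative checks (not part of this diff) of the matching semantics described in
# the docstring above: prototype keys must exist in the test object and match recursively.
assert matches({"tags": ["x86_64"], "stage": "build"}, {"tags": ["x86_64"]})
assert not matches({"tags": ["aarch64"]}, {"tags": ["x86_64"]})
assert not matches([1, 2], [1])  # sequences must have equal length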
|
||||
|
||||
def subkeys(obj, proto):
|
||||
"""Returns the test mapping "obj" after factoring out the items it has in
|
||||
common with the prototype mapping "proto".
|
||||
|
||||
Consider a recursive merge operation, merge(a, b) on mappings a and b, that
|
||||
returns a mapping, m, whose keys are the union of the keys of a and b, and
|
||||
for every such key, "k", its corresponding value is:
|
||||
|
||||
- merge(a[key], b[key]) if a[key] and b[key] are mappings, or
|
||||
- b[key] if (key in b) and not matches(a[key], b[key]),
|
||||
or
|
||||
- a[key] otherwise
|
||||
|
||||
|
||||
If obj and proto are mappings, the returned object is the smallest object,
|
||||
"a", such that merge(a, proto) matches obj.
|
||||
|
||||
Otherwise, obj is returned.
|
||||
"""
|
||||
if not (
|
||||
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
|
||||
):
|
||||
return obj
|
||||
|
||||
new_obj = {}
|
||||
for key, value in obj.items():
|
||||
if key not in proto:
|
||||
new_obj[key] = value
|
||||
continue
|
||||
|
||||
if matches(value, proto[key]) and matches(proto[key], value):
|
||||
continue
|
||||
|
||||
if isinstance(value, collections.abc.Mapping):
|
||||
new_obj[key] = subkeys(value, proto[key])
|
||||
continue
|
||||
|
||||
new_obj[key] = value
|
||||
|
||||
return new_obj
|
||||
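# Illustrative sketch (not part of this diff; the job content is made up): subkeys()
# keeps only what differs from the prototype, so a factored job carries its overrides.
job = {"tags": ["x86_64"], "script": ["spack ci rebuild"], "stage": "build"}
proto = {"tags": ["x86_64"], "script": ["spack ci rebuild"]}
# subkeys(job, proto) == {"stage": "build"}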
|
||||
|
||||
def add_extends(yaml, key):
|
||||
"""Modifies the given object "yaml" so that it includes an "extends" key
|
||||
whose value features "key".
|
||||
|
||||
If "extends" is not in yaml, then yaml is modified such that
|
||||
yaml["extends"] == key.
|
||||
|
||||
If yaml["extends"] is a str, then yaml is modified such that
|
||||
yaml["extends"] == [yaml["extends"], key]
|
||||
|
||||
If yaml["extends"] is a list that does not include key, then key is
|
||||
appended to the list.
|
||||
|
||||
Otherwise, yaml is left unchanged.
|
||||
"""
|
||||
|
||||
has_key = "extends" in yaml
|
||||
extends = yaml.get("extends")
|
||||
|
||||
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
|
||||
return
|
||||
|
||||
if extends is None:
|
||||
yaml["extends"] = key
|
||||
return
|
||||
|
||||
if isinstance(extends, str):
|
||||
if extends != key:
|
||||
yaml["extends"] = [extends, key]
|
||||
return
|
||||
|
||||
if key not in extends:
|
||||
extends.append(key)
|
||||
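# Illustrative behaviour (not part of this diff) of add_extends() above:
job = {"script": ["spack ci rebuild"]}
add_extends(job, ".c0")      # job["extends"] == ".c0"
add_extends(job, ".runner")  # job["extends"] == [".c0", ".runner"]
add_extends(job, ".runner")  # unchanged: the key is already present in the list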
|
||||
|
||||
def common_subobject(yaml, sub):
|
||||
"""Factor prototype object "sub" out of the values of mapping "yaml".
|
||||
|
||||
Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
|
||||
|
||||
- If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
|
||||
- Otherwise, new[key] = yaml[key].
|
||||
|
||||
If the above match criteria is not satisfied for any such key, then (yaml,
|
||||
None) is returned. The yaml object is returned unchanged.
|
||||
|
||||
Otherwise, each matching value in new is modified as in
|
||||
add_extends(new[key], common_key), and then new[common_key] is set to sub.
|
||||
The common_key value is chosen such that it does not match any preexisting
|
||||
key in new. In this case, (new, common_key) is returned.
|
||||
"""
|
||||
match_list = set(k for k, v in yaml.items() if matches(v, sub))
|
||||
|
||||
if not match_list:
|
||||
return yaml, None
|
||||
|
||||
common_prefix = ".c"
|
||||
common_index = 0
|
||||
|
||||
while True:
|
||||
common_key = "".join((common_prefix, str(common_index)))
|
||||
if common_key not in yaml:
|
||||
break
|
||||
common_index += 1
|
||||
|
||||
new_yaml = {}
|
||||
|
||||
for key, val in yaml.items():
|
||||
new_yaml[key] = copy.deepcopy(val)
|
||||
|
||||
if not matches(val, sub):
|
||||
continue
|
||||
|
||||
new_yaml[key] = subkeys(new_yaml[key], sub)
|
||||
add_extends(new_yaml[key], common_key)
|
||||
|
||||
new_yaml[common_key] = sub
|
||||
|
||||
return new_yaml, common_key
|
||||
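# Illustrative sketch (not part of this diff; job names made up) of the factoring step
# above: jobs that match the prototype are rewritten to extend a generated anchor key.
pipeline = {
    "job-a": {"tags": ["x86_64"], "script": ["spack ci rebuild"], "stage": "one"},
    "job-b": {"tags": ["x86_64"], "script": ["spack ci rebuild"], "stage": "two"},
}
new_pipeline, anchor = common_subobject(pipeline, {"tags": ["x86_64"], "script": ["spack ci rebuild"]})
# anchor == ".c0"; both jobs now carry "extends": ".c0" instead of repeating the shared
# keys, and new_pipeline[".c0"] holds the common prototype object.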
|
||||
|
||||
def print_delta(name, old, new, applied=None):
|
||||
delta = new - old
|
||||
reldelta = (1000 * delta) // old
|
||||
reldelta = (reldelta // 10, reldelta % 10)
|
||||
|
||||
if applied is None:
|
||||
applied = new <= old
|
||||
|
||||
print(
|
||||
"\n".join(
|
||||
(
|
||||
"{0} {1}:",
|
||||
" before: {2: 10d}",
|
||||
" after : {3: 10d}",
|
||||
" delta : {4:+10d} ({5:=+3d}.{6}%)",
|
||||
)
|
||||
).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
|
||||
)
|
||||
|
||||
|
||||
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
|
||||
"""Try applying an optimization pass and return information about the
|
||||
result
|
||||
|
||||
"name" is a string describing the nature of the pass. If it is a non-empty
|
||||
string, summary statistics are also printed to stdout.
|
||||
|
||||
"yaml" is the object to apply the pass to.
|
||||
|
||||
"optimization_pass" is the function implementing the pass to be applied.
|
||||
|
||||
"args" and "kwargs" are the additional arguments to pass to optimization
|
||||
pass. The pass is applied as
|
||||
|
||||
>>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
|
||||
|
||||
The pass's results are greedily rejected if it does not modify the original
|
||||
yaml document, or if it produces a yaml document that serializes to a
|
||||
larger string.
|
||||
|
||||
Returns (new_yaml, yaml, applied, other_results) if applied, or
|
||||
(yaml, new_yaml, applied, other_results) otherwise.
|
||||
"""
|
||||
result = optimization_pass(yaml, *args, **kwargs)
|
||||
new_yaml, other_results = result[0], result[1:]
|
||||
|
||||
if new_yaml is yaml:
|
||||
# pass was not applied
|
||||
return (yaml, new_yaml, False, other_results)
|
||||
|
||||
pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
|
||||
post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
|
||||
|
||||
# pass makes the size worse: not applying
|
||||
applied = post_size <= pre_size
|
||||
if applied:
|
||||
yaml, new_yaml = new_yaml, yaml
|
||||
|
||||
if name:
|
||||
print_delta(name, pre_size, post_size, applied)
|
||||
|
||||
return (yaml, new_yaml, applied, other_results)
|
||||
|
||||
|
||||
def build_histogram(iterator, key):
|
||||
"""Builds a histogram of values given an iterable of mappings and a key.
|
||||
|
||||
For each mapping "m" with key "key" in iterator, the value m[key] is
|
||||
considered.
|
||||
|
||||
Returns a list of tuples (hash, count, proportion, value), where
|
||||
|
||||
- "hash" is a sha1sum hash of the value.
|
||||
- "count" is the number of occurences of values that hash to "hash".
|
||||
- "proportion" is the proportion of all values considered above that
|
||||
hash to "hash".
|
||||
- "value" is one of the values considered above that hash to "hash".
|
||||
Which value is chosen when multiple values hash to the same "hash" is
|
||||
undefined.
|
||||
|
||||
The list is sorted in descending order by count, yielding the most
|
||||
frequently occuring hashes first.
|
||||
"""
|
||||
buckets = collections.defaultdict(int)
|
||||
values = {}
|
||||
|
||||
num_objects = 0
|
||||
for obj in iterator:
|
||||
num_objects += 1
|
||||
|
||||
try:
|
||||
val = obj[key]
|
||||
except (KeyError, TypeError):
|
||||
continue
|
||||
|
||||
value_hash = hashlib.sha1()
|
||||
value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
|
||||
value_hash = value_hash.hexdigest()
|
||||
|
||||
buckets[value_hash] += 1
|
||||
values[value_hash] = val
|
||||
|
||||
return [
|
||||
(h, buckets[h], float(buckets[h]) / num_objects, values[h])
|
||||
for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
|
||||
]
|
||||
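# Illustrative input/output (not part of this diff) for the histogram helper above:
jobs = [
    {"tags": ["x86_64"]},
    {"tags": ["x86_64"]},
    {"tags": ["aarch64"]},
    {"script": ["true"]},  # no "tags" key: skipped, but still counted in the total
]
hist = build_histogram(jobs, "tags")
# hist[0] is the most frequent value: (<sha1 of ["x86_64"]>, 2, 0.5, ["x86_64"])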
|
||||
|
||||
def optimizer(yaml):
|
||||
original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
|
||||
|
||||
# try factoring out commonly repeated portions
|
||||
common_job = {
|
||||
"variables": {"SPACK_COMPILER_ACTION": "NONE"},
|
||||
"after_script": ['rm -rf "./spack"'],
|
||||
"artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
|
||||
}
|
||||
|
||||
# look for a list of tags that appear frequently
|
||||
_, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
|
||||
|
||||
# If a list of tags is found, and there are more than one job that uses it,
|
||||
# *and* the jobs that do use it represent at least 70% of all jobs, then
|
||||
# add the list to the prototype object.
|
||||
if tags and count > 1 and proportion >= 0.70:
|
||||
common_job["tags"] = tags
|
||||
|
||||
# apply common object factorization
|
||||
yaml, other, applied, rest = try_optimization_pass(
|
||||
"general common object factorization", yaml, common_subobject, common_job
|
||||
)
|
||||
|
||||
# look for a common script, and try factoring that out
|
||||
_, count, proportion, script = next(
|
||||
iter(build_histogram(yaml.values(), "script")), (None,) * 4
|
||||
)
|
||||
|
||||
if script and count > 1 and proportion >= 0.70:
|
||||
yaml, other, applied, rest = try_optimization_pass(
|
||||
"script factorization", yaml, common_subobject, {"script": script}
|
||||
)
|
||||
|
||||
# look for a common before_script, and try factoring that out
|
||||
_, count, proportion, script = next(
|
||||
iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
|
||||
)
|
||||
|
||||
if script and count > 1 and proportion >= 0.70:
|
||||
yaml, other, applied, rest = try_optimization_pass(
|
||||
"before_script factorization", yaml, common_subobject, {"before_script": script}
|
||||
)
|
||||
|
||||
# Look specifically for the SPACK_ROOT_SPEC environment variables.
|
||||
# Try to factor them out.
|
||||
h = build_histogram(
|
||||
(getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
|
||||
"SPACK_ROOT_SPEC",
|
||||
)
|
||||
|
||||
# In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
|
||||
# environment variable; not just the one that appears with the greatest
|
||||
# frequency. We only require that more than 1 job uses a given instance's
|
||||
# value, because we expect the value to be very large, and so expect even
|
||||
# few-to-one factorizations to yield large space savings.
|
||||
counter = 0
|
||||
for _, count, proportion, spec in h:
|
||||
if count <= 1:
|
||||
continue
|
||||
|
||||
counter += 1
|
||||
|
||||
yaml, other, applied, rest = try_optimization_pass(
|
||||
"SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
|
||||
yaml,
|
||||
common_subobject,
|
||||
{"variables": {"SPACK_ROOT_SPEC": spec}},
|
||||
)
|
||||
|
||||
new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
|
||||
|
||||
print("\n")
|
||||
print_delta("overall summary", original_size, new_size)
|
||||
print("\n")
|
||||
return yaml
|
||||
@@ -237,7 +237,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return

format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -336,6 +336,7 @@ def display_specs(specs, args=None, **kwargs):
|
||||
groups (bool): display specs grouped by arch/compiler (default True)
|
||||
decorator (typing.Callable): function to call to decorate specs
|
||||
all_headers (bool): show headers even when arch/compiler aren't defined
|
||||
status_fn (typing.Callable): if provided, prepend install-status info
|
||||
output (typing.IO): A file object to write to. Default is ``sys.stdout``
|
||||
|
||||
"""
|
||||
@@ -359,6 +360,7 @@ def get_arg(name, default=None):
|
||||
groups = get_arg("groups", True)
|
||||
all_headers = get_arg("all_headers", False)
|
||||
output = get_arg("output", sys.stdout)
|
||||
status_fn = get_arg("status_fn", None)
|
||||
|
||||
decorator = get_arg("decorator", None)
|
||||
if decorator is None:
|
||||
@@ -386,6 +388,13 @@ def get_arg(name, default=None):
|
||||
def fmt(s, depth=0):
|
||||
"""Formatter function for all output specs"""
|
||||
string = ""
|
||||
|
||||
if status_fn:
|
||||
# This was copied from spec.tree's colorization logic
|
||||
# then shortened because it seems like status_fn should
|
||||
# always return an InstallStatus
|
||||
string += colorize(status_fn(s).value)
|
||||
|
||||
if hashes:
|
||||
string += gray_hash(s, hlen) + " "
|
||||
string += depth * " "
|
||||
@@ -444,7 +453,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
"""Filter a list of specs returning only those that are
currently loaded."""
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
return [x for x in specs if x.dag_hash() in hashes]


@@ -165,7 +165,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")

for scope in spack.config.CONFIG.file_scopes:
for scope in spack.config.CONFIG.writable_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue

@@ -3,28 +3,24 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import argparse
|
||||
import copy
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import multiprocessing
|
||||
import multiprocessing.pool
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Dict, List, Optional, Tuple, Union
|
||||
from typing import List, Tuple
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import plural
|
||||
from llnl.util.lang import elide_list
|
||||
from llnl.util.lang import elide_list, stable_partition
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.cmd
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.oci.oci
|
||||
import spack.oci.opener
|
||||
@@ -35,28 +31,13 @@
|
||||
import spack.store
|
||||
import spack.user_environment
|
||||
import spack.util.crypto
|
||||
import spack.util.parallel
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack import traverse
|
||||
from spack.build_environment import determine_number_of_jobs
|
||||
from spack.cmd import display_specs
|
||||
from spack.cmd.common import arguments
|
||||
from spack.oci.image import (
|
||||
Digest,
|
||||
ImageReference,
|
||||
default_config,
|
||||
default_index_tag,
|
||||
default_manifest,
|
||||
default_tag,
|
||||
tag_is_spec,
|
||||
)
|
||||
from spack.oci.oci import (
|
||||
copy_missing_layers_with_retry,
|
||||
get_manifest_and_config_with_retry,
|
||||
list_tags,
|
||||
upload_blob_with_retry,
|
||||
upload_manifest_with_retry,
|
||||
)
|
||||
from spack.oci.image import ImageReference
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
|
||||
description = "create, download and install binary packages"
|
||||
@@ -70,12 +51,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
|
||||
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
||||
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
|
||||
push.add_argument(
|
||||
"--allow-root",
|
||||
"-a",
|
||||
action="store_true",
|
||||
help="allow install root string in binary files after RPATH substitution",
|
||||
)
|
||||
push_sign = push.add_mutually_exclusive_group(required=False)
|
||||
push_sign.add_argument(
|
||||
"--unsigned",
|
||||
@@ -118,6 +93,17 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
"Alternatively, one can decide to build a cache for only the package or only the "
|
||||
"dependencies",
|
||||
)
|
||||
with_or_without_build_deps = push.add_mutually_exclusive_group()
|
||||
with_or_without_build_deps.add_argument(
|
||||
"--with-build-dependencies",
|
||||
action="store_true",
|
||||
help="include build dependencies in the buildcache",
|
||||
)
|
||||
with_or_without_build_deps.add_argument(
|
||||
"--without-build-dependencies",
|
||||
action="store_true",
|
||||
help="exclude build dependencies from the buildcache",
|
||||
)
|
||||
push.add_argument(
|
||||
"--fail-fast",
|
||||
action="store_true",
|
||||
@@ -190,10 +176,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
|
||||
keys.set_defaults(func=keys_fn)
|
||||
|
||||
preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
|
||||
arguments.add_common_arguments(preview, ["installed_specs"])
|
||||
preview.set_defaults(func=preview_fn)
|
||||
|
||||
# Check if binaries need to be rebuilt on remote mirror
|
||||
check = subparsers.add_parser("check", help=check_fn.__doc__)
|
||||
check.add_argument(
|
||||
@@ -339,39 +321,6 @@ def _format_spec(spec: Spec) -> str:
|
||||
return spec.cformat("{name}{@version}{/hash:7}")
|
||||
|
||||
|
||||
def _progress(i: int, total: int):
|
||||
if total > 1:
|
||||
digits = len(str(total))
|
||||
return f"[{i+1:{digits}}/{total}] "
|
||||
return ""
|
||||
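# Illustrative checks (not part of this diff) of the progress prefix above:
assert _progress(0, 12) == "[ 1/12] "
assert _progress(11, 12) == "[12/12] "
assert _progress(0, 1) == ""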
|
||||
|
||||
class NoPool:
|
||||
def map(self, func, args):
|
||||
return [func(a) for a in args]
|
||||
|
||||
def starmap(self, func, args):
|
||||
return [func(*a) for a in args]
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
pass
|
||||
|
||||
|
||||
MaybePool = Union[multiprocessing.pool.Pool, NoPool]
|
||||
|
||||
|
||||
def _make_pool() -> MaybePool:
|
||||
"""Can't use threading because it's unsafe, and can't use spawned processes because of globals.
|
||||
That leaves only forking"""
|
||||
if multiprocessing.get_start_method() == "fork":
|
||||
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
|
||||
else:
|
||||
return NoPool()
|
||||
|
||||
|
||||
def _skip_no_redistribute_for_public(specs):
|
||||
remaining_specs = list()
|
||||
removed_specs = list()
|
||||
@@ -391,6 +340,45 @@ def _skip_no_redistribute_for_public(specs):
|
||||
return remaining_specs
|
||||
|
||||
|
||||
class PackagesAreNotInstalledError(spack.error.SpackError):
|
||||
"""Raised when a list of specs is not installed but picked to be packaged."""
|
||||
|
||||
def __init__(self, specs: List[Spec]):
|
||||
super().__init__(
|
||||
"Cannot push non-installed packages",
|
||||
", ".join(elide_list([_format_spec(s) for s in specs], 5)),
|
||||
)
|
||||
|
||||
|
||||
class PackageNotInstalledError(spack.error.SpackError):
|
||||
"""Raised when a spec is not installed but picked to be packaged."""
|
||||
|
||||
|
||||
def _specs_to_be_packaged(
|
||||
requested: List[Spec], things_to_install: str, build_deps: bool
|
||||
) -> List[Spec]:
|
||||
"""Collect all non-external with or without roots and dependencies"""
|
||||
if "dependencies" not in things_to_install:
|
||||
deptype = dt.NONE
|
||||
elif build_deps:
|
||||
deptype = dt.ALL
|
||||
else:
|
||||
deptype = dt.RUN | dt.LINK | dt.TEST
|
||||
specs = [
|
||||
s
|
||||
for s in traverse.traverse_nodes(
|
||||
requested,
|
||||
root="package" in things_to_install,
|
||||
deptype=deptype,
|
||||
order="breadth",
|
||||
key=traverse.by_dag_hash,
|
||||
)
|
||||
if not s.external
|
||||
]
|
||||
specs.reverse()
|
||||
return specs
|
||||
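# Sketch (not part of this diff; _deptype_for is a hypothetical name) of how the
# dependency type above is chosen from the command-line selection; it mirrors the
# branches in _specs_to_be_packaged.
def _deptype_for(things_to_install: str, build_deps: bool):
    if "dependencies" not in things_to_install:
        return dt.NONE  # only the requested roots are packaged
    return dt.ALL if build_deps else dt.RUN | dt.LINK | dt.TEST

assert _deptype_for("package", False) == dt.NONE
assert _deptype_for("package,dependencies", True) == dt.ALL
# In every case, external specs are filtered out and the traversal is reversed so
# leaves are pushed before the packages that depend on them.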
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.spec_file:
|
||||
@@ -404,11 +392,6 @@ def push_fn(args):
|
||||
else:
|
||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||
|
||||
if args.allow_root:
|
||||
tty.warn(
|
||||
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
|
||||
)
|
||||
|
||||
mirror: spack.mirror.Mirror = args.mirror
|
||||
|
||||
# Check if this is an OCI image.
|
||||
@@ -427,91 +410,84 @@ def push_fn(args):
|
||||
|
||||
# For OCI images, we require dependencies to be pushed for now.
|
||||
if target_image:
|
||||
if "dependencies" not in args.things_to_install:
|
||||
tty.die("Dependencies must be pushed for OCI images.")
|
||||
if not unsigned:
|
||||
tty.warn(
|
||||
"Code signing is currently not supported for OCI images. "
|
||||
"Use --unsigned to silence this warning."
|
||||
)
|
||||
unsigned = True
|
||||
|
||||
# This is a list of installed, non-external specs.
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
# Select a signing key, or None if unsigned.
|
||||
signing_key = None if unsigned else (args.key or bindist.select_signing_key())
|
||||
|
||||
specs = _specs_to_be_packaged(
|
||||
roots,
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
things_to_install=args.things_to_install,
|
||||
build_deps=args.with_build_dependencies or not args.without_build_dependencies,
|
||||
)
|
||||
|
||||
if not args.private:
|
||||
specs = _skip_no_redistribute_for_public(specs)
|
||||
|
||||
# When pushing multiple specs, print the url once ahead of time, as well as how
|
||||
# many specs are being pushed.
|
||||
if len(specs) > 1:
|
||||
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
|
||||
|
||||
failed = []
|
||||
# Pushing not installed specs is an error. Either fail fast or populate the error list and
|
||||
# push installed package in best effort mode.
|
||||
failed: List[Tuple[Spec, BaseException]] = []
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
if any(not s.installed for s in specs):
|
||||
specs, not_installed = stable_partition(specs, lambda s: s.installed)
|
||||
if args.fail_fast:
|
||||
raise PackagesAreNotInstalledError(not_installed)
|
||||
else:
|
||||
failed.extend(
|
||||
(s, PackageNotInstalledError("package not installed")) for s in not_installed
|
||||
)
|
||||
|
||||
# TODO: unify this logic in the future.
|
||||
if target_image:
|
||||
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
skipped, base_images, checksums = _push_oci(
|
||||
with bindist.default_push_context() as (tmpdir, executor):
|
||||
if target_image:
|
||||
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
skipped, base_images, checksums, upload_errors = bindist._push_oci(
|
||||
target_image=target_image,
|
||||
base_image=base_image,
|
||||
installed_specs_with_deps=specs,
|
||||
force=args.force,
|
||||
tmpdir=tmpdir,
|
||||
pool=pool,
|
||||
executor=executor,
|
||||
)
|
||||
|
||||
if upload_errors:
|
||||
failed.extend(upload_errors)
|
||||
|
||||
# Apart from creating manifests for each individual spec, we allow users to create a
|
||||
# separate image tag for all root specs and their runtime dependencies.
|
||||
if args.tag:
|
||||
elif args.tag:
|
||||
tagged_image = target_image.with_tag(args.tag)
|
||||
# _push_oci may not populate base_images if binaries were already in the registry
|
||||
for spec in roots:
|
||||
_update_base_images(
|
||||
bindist._oci_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
_put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
|
||||
bindist._oci_put_manifest(
|
||||
base_images, checksums, tagged_image, tmpdir, None, None, *roots
|
||||
)
|
||||
tty.info(f"Tagged {tagged_image}")
|
||||
|
||||
else:
|
||||
skipped = []
|
||||
|
||||
for i, spec in enumerate(specs):
|
||||
try:
|
||||
bindist.push_or_raise(
|
||||
spec,
|
||||
push_url,
|
||||
bindist.PushOptions(
|
||||
force=args.force,
|
||||
unsigned=unsigned,
|
||||
key=args.key,
|
||||
regenerate_index=args.update_index,
|
||||
),
|
||||
)
|
||||
|
||||
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
|
||||
if len(specs) == 1:
|
||||
msg += f" to {push_url}"
|
||||
tty.info(msg)
|
||||
|
||||
except bindist.NoOverwriteException:
|
||||
skipped.append(_format_spec(spec))
|
||||
|
||||
# Catch any other exception unless the fail fast option is set
|
||||
except Exception as e:
|
||||
if args.fail_fast or isinstance(
|
||||
e, (bindist.PickKeyException, bindist.NoKeyException)
|
||||
):
|
||||
raise
|
||||
failed.append((_format_spec(spec), e))
|
||||
else:
|
||||
skipped, upload_errors = bindist._push(
|
||||
specs,
|
||||
out_url=push_url,
|
||||
force=args.force,
|
||||
update_index=args.update_index,
|
||||
signing_key=signing_key,
|
||||
tmpdir=tmpdir,
|
||||
executor=executor,
|
||||
)
|
||||
failed.extend(upload_errors)
|
||||
|
||||
if skipped:
|
||||
if len(specs) == 1:
|
||||
@@ -534,389 +510,22 @@ def push_fn(args):
|
||||
raise spack.error.SpackError(
|
||||
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
|
||||
"\n".join(
|
||||
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
|
||||
elide_list(
|
||||
[
|
||||
f" {_format_spec(spec)}: {e.__class__.__name__}: {e}"
|
||||
for spec, e in failed
|
||||
],
|
||||
5,
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
# Update the index if requested
|
||||
# TODO: remove update index logic out of bindist; should be once after all specs are pushed
|
||||
# not once per spec.
|
||||
# Update the OCI index if requested
|
||||
if target_image and len(skipped) < len(specs) and args.update_index:
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
_update_index_oci(target_image, tmpdir, pool)
|
||||
|
||||
|
||||
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
|
||||
"""Get the spack tarball layer digests and size if it exists"""
|
||||
try:
|
||||
manifest, config = get_manifest_and_config_with_retry(image_ref)
|
||||
|
||||
return spack.oci.oci.Blob(
|
||||
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
|
||||
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
|
||||
size=manifest["layers"][-1]["size"],
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
|
||||
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
|
||||
|
||||
# Create an oci.image.layer aka tarball of the package
|
||||
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
|
||||
|
||||
blob = spack.oci.oci.Blob(
|
||||
Digest.from_sha256(compressed_tarfile_checksum),
|
||||
Digest.from_sha256(tarfile_checksum),
|
||||
os.path.getsize(filename),
|
||||
)
|
||||
|
||||
# Upload the blob
|
||||
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
|
||||
|
||||
# delete the file
|
||||
os.unlink(filename)
|
||||
|
||||
return blob
|
||||
|
||||
|
||||
def _retrieve_env_dict_from_config(config: dict) -> dict:
|
||||
"""Retrieve the environment variables from the image config file.
|
||||
Sets a default value for PATH if it is not present.
|
||||
|
||||
Args:
|
||||
config (dict): The image config file.
|
||||
|
||||
Returns:
|
||||
dict: The environment variables.
|
||||
"""
|
||||
env = {"PATH": "/bin:/usr/bin"}
|
||||
|
||||
if "Env" in config.get("config", {}):
|
||||
for entry in config["config"]["Env"]:
|
||||
key, value = entry.split("=", 1)
|
||||
env[key] = value
|
||||
return env
|
||||
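# Illustrative input/output (not part of this diff; sample config only) for the helper above:
sample_config = {"config": {"Env": ["PATH=/opt/view/bin:/usr/bin", "CC=gcc"]}}
# _retrieve_env_dict_from_config(sample_config)
#     == {"PATH": "/opt/view/bin:/usr/bin", "CC": "gcc"}
# With no "Env" entry, the PATH default "/bin:/usr/bin" is returned unchanged.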
|
||||
|
||||
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
|
||||
name = spec.target.family.name
|
||||
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
|
||||
return name_map.get(name, name)
|
||||
|
||||
|
||||
def _put_manifest(
|
||||
base_images: Dict[str, Tuple[dict, dict]],
|
||||
checksums: Dict[str, spack.oci.oci.Blob],
|
||||
image_ref: ImageReference,
|
||||
tmpdir: str,
|
||||
extra_config: Optional[dict],
|
||||
annotations: Optional[dict],
|
||||
*specs: spack.spec.Spec,
|
||||
):
|
||||
architecture = _archspec_to_gooarch(specs[0])
|
||||
|
||||
dependencies = list(
|
||||
reversed(
|
||||
list(
|
||||
s
|
||||
for s in traverse.traverse_nodes(
|
||||
specs, order="topo", deptype=("link", "run"), root=True
|
||||
)
|
||||
if not s.external
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
base_manifest, base_config = base_images[architecture]
|
||||
env = _retrieve_env_dict_from_config(base_config)
|
||||
|
||||
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
|
||||
# This is because Singularity / Apptainer is very strict about not mixing them.
|
||||
base_manifest_mediaType = base_manifest.get(
|
||||
"mediaType", "application/vnd.oci.image.manifest.v1+json"
|
||||
)
|
||||
use_docker_format = (
|
||||
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
|
||||
)
|
||||
|
||||
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
|
||||
|
||||
# Create an oci.image.config file
|
||||
config = copy.deepcopy(base_config)
|
||||
|
||||
# Add the diff ids of the dependencies
|
||||
for s in dependencies:
|
||||
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
|
||||
|
||||
# Set the environment variables
|
||||
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
|
||||
|
||||
if extra_config:
|
||||
# From the OCI v1.0 spec:
|
||||
# > Any extra fields in the Image JSON struct are considered implementation
|
||||
# > specific and MUST be ignored by any implementations which are unable to
|
||||
# > interpret them.
|
||||
config.update(extra_config)
|
||||
|
||||
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
|
||||
|
||||
with open(config_file, "w") as f:
|
||||
json.dump(config, f, separators=(",", ":"))
|
||||
|
||||
config_file_checksum = Digest.from_sha256(
|
||||
spack.util.crypto.checksum(hashlib.sha256, config_file)
|
||||
)
|
||||
|
||||
# Upload the config file
|
||||
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
|
||||
|
||||
manifest = {
|
||||
"mediaType": base_manifest_mediaType,
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": base_manifest["config"]["mediaType"],
|
||||
"digest": str(config_file_checksum),
|
||||
"size": os.path.getsize(config_file),
|
||||
},
|
||||
"layers": [
|
||||
*(layer for layer in base_manifest["layers"]),
|
||||
*(
|
||||
{
|
||||
"mediaType": (
|
||||
"application/vnd.docker.image.rootfs.diff.tar.gzip"
|
||||
if use_docker_format
|
||||
else "application/vnd.oci.image.layer.v1.tar+gzip"
|
||||
),
|
||||
"digest": str(checksums[s.dag_hash()].compressed_digest),
|
||||
"size": checksums[s.dag_hash()].size,
|
||||
}
|
||||
for s in dependencies
|
||||
),
|
||||
],
|
||||
}
|
||||
|
||||
if not use_docker_format and annotations:
|
||||
manifest["annotations"] = annotations
|
||||
|
||||
# Finally upload the manifest
|
||||
upload_manifest_with_retry(image_ref, manifest=manifest)
|
||||
|
||||
# delete the config file
|
||||
os.unlink(config_file)
|
||||
|
||||
|
||||
def _update_base_images(
|
||||
*,
|
||||
base_image: Optional[ImageReference],
|
||||
target_image: ImageReference,
|
||||
spec: spack.spec.Spec,
|
||||
base_image_cache: Dict[str, Tuple[dict, dict]],
|
||||
):
|
||||
"""For a given spec and base image, copy the missing layers of the base image with matching
|
||||
arch to the registry of the target image. If no base image is specified, create a dummy
|
||||
manifest and config file."""
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
if architecture in base_image_cache:
|
||||
return
|
||||
if base_image is None:
|
||||
base_image_cache[architecture] = (
|
||||
default_manifest(),
|
||||
default_config(architecture, "linux"),
|
||||
)
|
||||
else:
|
||||
base_image_cache[architecture] = copy_missing_layers_with_retry(
|
||||
base_image, target_image, architecture
|
||||
)
|
||||
|
||||
|
||||
def _push_oci(
|
||||
*,
|
||||
target_image: ImageReference,
|
||||
base_image: Optional[ImageReference],
|
||||
installed_specs_with_deps: List[Spec],
|
||||
tmpdir: str,
|
||||
pool: MaybePool,
|
||||
force: bool = False,
|
||||
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
|
||||
"""Push specs to an OCI registry
|
||||
|
||||
Args:
|
||||
image_ref: The target OCI image
|
||||
base_image: Optional base image, which will be copied to the target registry.
|
||||
installed_specs_with_deps: The installed specs to push, excluding externals,
|
||||
including deps, ordered from roots to leaves.
|
||||
force: Whether to overwrite existing layers and manifests in the buildcache.
|
||||
|
||||
Returns:
|
||||
A tuple consisting of the list of skipped specs already in the build cache,
|
||||
a dictionary mapping architectures to base image manifests and configs,
|
||||
and a dictionary mapping each spec's dag hash to a blob.
|
||||
"""
|
||||
|
||||
# Reverse the order
|
||||
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
|
||||
|
||||
# Spec dag hash -> blob
|
||||
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
||||
|
||||
# arch -> (manifest, config)
|
||||
base_images: Dict[str, Tuple[dict, dict]] = {}
|
||||
|
||||
# Specs not uploaded because they already exist
|
||||
skipped = []
|
||||
|
||||
if not force:
|
||||
tty.info("Checking for existing specs in the buildcache")
|
||||
to_be_uploaded = []
|
||||
|
||||
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
|
||||
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
|
||||
|
||||
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
|
||||
if maybe_blob is not None:
|
||||
checksums[spec.dag_hash()] = maybe_blob
|
||||
skipped.append(_format_spec(spec))
|
||||
else:
|
||||
to_be_uploaded.append(spec)
|
||||
else:
|
||||
to_be_uploaded = installed_specs_with_deps
|
||||
|
||||
if not to_be_uploaded:
|
||||
return skipped, base_images, checksums
|
||||
|
||||
tty.info(
|
||||
f"{len(to_be_uploaded)} specs need to be pushed to "
|
||||
f"{target_image.domain}/{target_image.name}"
|
||||
)
|
||||
|
||||
# Upload blobs
|
||||
new_blobs = pool.starmap(
|
||||
_push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
|
||||
)
|
||||
|
||||
# And update the spec to blob mapping
|
||||
for spec, blob in zip(to_be_uploaded, new_blobs):
|
||||
checksums[spec.dag_hash()] = blob
|
||||
|
||||
# Copy base images if necessary
|
||||
for spec in to_be_uploaded:
|
||||
_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
|
||||
def extra_config(spec: Spec):
|
||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||
}
|
||||
return spec_dict
|
||||
|
||||
# Upload manifests
|
||||
tty.info("Uploading manifests")
|
||||
pool.starmap(
|
||||
_put_manifest,
|
||||
(
|
||||
(
|
||||
base_images,
|
||||
checksums,
|
||||
target_image.with_tag(default_tag(spec)),
|
||||
tmpdir,
|
||||
extra_config(spec),
|
||||
{"org.opencontainers.image.description": spec.format()},
|
||||
spec,
|
||||
)
|
||||
for spec in to_be_uploaded
|
||||
),
|
||||
)
|
||||
|
||||
# Print the image names of the top-level specs
|
||||
for spec in to_be_uploaded:
|
||||
tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
|
||||
|
||||
return skipped, base_images, checksums
|
||||
|
||||
|
||||
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
|
||||
# Don't allow recursion here, since Spack itself always uploads
|
||||
# vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
|
||||
_, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
|
||||
|
||||
# Do very basic validation: if "spec" is a key in the config, it
|
||||
# must be a Spec object too.
|
||||
return config if "spec" in config else None
|
||||
|
||||
|
||||
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
|
||||
tags = list_tags(image_ref)
|
||||
|
||||
# Fetch all image config files in parallel
|
||||
spec_dicts = pool.starmap(
|
||||
_config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
|
||||
)
|
||||
|
||||
# Populate the database
|
||||
db_root_dir = os.path.join(tmpdir, "db_root")
|
||||
db = bindist.BuildCacheDatabase(db_root_dir)
|
||||
|
||||
for spec_dict in spec_dicts:
|
||||
spec = Spec.from_dict(spec_dict)
|
||||
db.add(spec, directory_layout=None)
|
||||
db.mark(spec, "in_buildcache", True)
|
||||
|
||||
# Create the index.json file
|
||||
index_json_path = os.path.join(tmpdir, "index.json")
|
||||
with open(index_json_path, "w") as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
# Create an empty config.json file
|
||||
empty_config_json_path = os.path.join(tmpdir, "config.json")
|
||||
with open(empty_config_json_path, "wb") as f:
|
||||
f.write(b"{}")
|
||||
|
||||
# Upload the index.json file
|
||||
index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
|
||||
upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
|
||||
|
||||
# Upload the config.json file
|
||||
empty_config_digest = Digest.from_sha256(
|
||||
spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
|
||||
)
|
||||
upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
|
||||
|
||||
# Push a manifest file that references the index.json file as a layer
|
||||
# Notice that we push this as if it is an image, which it of course is not.
|
||||
# When the ORAS spec becomes official, we can use that instead of a fake image.
|
||||
# For now we just use the OCI image spec, so that we don't run into issues with
|
||||
# automatic garbage collection of blobs that are not referenced by any image manifest.
|
||||
oci_manifest = {
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"schemaVersion": 2,
|
||||
# Config is just an empty {} file for now, and irrelevant
|
||||
"config": {
|
||||
"mediaType": "application/vnd.oci.image.config.v1+json",
|
||||
"digest": str(empty_config_digest),
|
||||
"size": os.path.getsize(empty_config_json_path),
|
||||
},
|
||||
# The buildcache index is the only layer, and is not a tarball, we lie here.
|
||||
"layers": [
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
|
||||
"digest": str(index_shasum),
|
||||
"size": os.path.getsize(index_json_path),
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
|
||||
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
|
||||
bindist._oci_update_index(target_image, tmpdir, executor)
|
||||
|
||||
|
||||
def install_fn(args):
@@ -960,14 +569,6 @@ def keys_fn(args):
bindist.get_keys(args.install, args.trust, args.force)


def preview_fn(args):
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
tty.warn(
"`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
"now the default. This command will be removed in Spack 0.22"
)


def check_fn(args: argparse.Namespace):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt

@@ -1205,14 +806,15 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
||||
if image_ref:
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
_update_index_oci(image_ref, tmpdir, pool)
|
||||
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
|
||||
bindist._oci_update_index(image_ref, tmpdir, executor)
|
||||
return
|
||||
|
||||
# Otherwise, assume a normal mirror.
|
||||
url = mirror.push_url
|
||||
|
||||
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
|
||||
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
|
||||
bindist.generate_package_index(url, tmpdir)
|
||||
|
||||
if update_keys:
|
||||
keys_url = url_util.join(
|
||||
@@ -1220,7 +822,8 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
||||
)
|
||||
|
||||
try:
|
||||
bindist.generate_key_index(keys_url)
|
||||
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
|
||||
bindist.generate_key_index(keys_url, tmpdir)
|
||||
except bindist.CannotListKeys as e:
|
||||
# Do not error out if listing keys went wrong. This usually means that the _gpg path
|
||||
# does not exist. TODO: distinguish between this and other errors.
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import warnings
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -73,7 +74,7 @@ def setup_parser(subparser):
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(experimental) optimize the gitlab yaml file for size\n\n"
|
||||
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
|
||||
"run the generated document through a series of optimization passes "
|
||||
"designed to reduce the size of the generated file",
|
||||
)
|
||||
@@ -81,7 +82,7 @@ def setup_parser(subparser):
|
||||
"--dependencies",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
|
||||
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--buildcache-destination",
|
||||
@@ -200,6 +201,18 @@ def ci_generate(args):
|
||||
before invoking this command. the value must be the CDash authorization token needed to create
|
||||
a build group and register all generated jobs under it
|
||||
"""
|
||||
if args.optimize:
|
||||
warnings.warn(
|
||||
"The --optimize option has been deprecated, and currently has no effect. "
|
||||
"It will be removed in Spack v0.24."
|
||||
)
|
||||
|
||||
if args.dependencies:
|
||||
warnings.warn(
|
||||
"The --dependencies option has been deprecated, and currently has no effect. "
|
||||
"It will be removed in Spack v0.24."
|
||||
)
|
||||
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
if args.copy_to:
|
||||
@@ -212,8 +225,6 @@ def ci_generate(args):
|
||||
|
||||
output_file = args.output_file
|
||||
copy_yaml_to = args.copy_to
|
||||
run_optimizer = args.optimize
|
||||
use_dependencies = args.dependencies
|
||||
prune_dag = args.prune_dag
|
||||
index_only = args.index_only
|
||||
artifacts_root = args.artifacts_root
|
||||
@@ -234,8 +245,6 @@ def ci_generate(args):
|
||||
output_file,
|
||||
prune_dag=prune_dag,
|
||||
check_index_only=index_only,
|
||||
run_optimizer=run_optimizer,
|
||||
use_dependencies=use_dependencies,
|
||||
artifacts_root=artifacts_root,
|
||||
remote_mirror_override=buildcache_destination,
|
||||
)
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
|
||||
from llnl.util.tty.colify import colify
|
||||
@@ -867,9 +866,6 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
|
||||
prepend_header(args, f)
|
||||
formatter(args, f)
|
||||
|
||||
if args.update_completion:
|
||||
fs.set_executable(args.update)
|
||||
|
||||
else:
|
||||
prepend_header(args, sys.stdout)
|
||||
formatter(args, sys.stdout)
|
||||
|
||||
@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
if env is not None:
|
||||
pristine = env.manifest.pristine_yaml_content
|
||||
pristine = env.manifest.yaml_content
|
||||
flattened = pristine.copy()
|
||||
flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
|
||||
else:
|
||||
@@ -264,7 +264,9 @@ def config_remove(args):
|
||||
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
|
||||
if isinstance(scope, spack.config.SingleFileScope):
|
||||
return fs.can_access(cfg_file)
|
||||
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
|
||||
elif isinstance(scope, spack.config.DirectoryConfigScope):
|
||||
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
|
||||
return False
|
||||
|
||||
|
||||
def _config_change_requires_scope(path, spec, scope, match_spec=None):
|
||||
@@ -362,14 +364,11 @@ def config_change(args):
|
||||
def config_update(args):
|
||||
# Read the configuration files
|
||||
spack.config.CONFIG.get_config(args.section, scope=args.scope)
|
||||
updates: List[spack.config.ConfigScope] = list(
|
||||
filter(
|
||||
lambda s: not isinstance(
|
||||
s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
|
||||
),
|
||||
spack.config.CONFIG.format_updates[args.section],
|
||||
)
|
||||
)
|
||||
updates: List[spack.config.ConfigScope] = [
|
||||
x
|
||||
for x in spack.config.CONFIG.format_updates[args.section]
|
||||
if not isinstance(x, spack.config.InternalConfigScope) and x.writable
|
||||
]
|
||||
|
||||
cannot_overwrite, skip_system_scope = [], False
|
||||
for scope in updates:
|
||||
@@ -447,7 +446,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
|
||||
|
||||
|
||||
def config_revert(args):
|
||||
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
|
||||
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
|
||||
|
||||
# Search for backup files in the configuration scopes
|
||||
Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@@ -934,7 +933,7 @@ def get_repository(args, name):
|
||||
# Figure out where the new package should live
|
||||
repo_path = args.repo
|
||||
if repo_path is not None:
|
||||
repo = spack.repo.Repo(repo_path)
|
||||
repo = spack.repo.from_path(repo_path)
|
||||
if spec.namespace and spec.namespace != repo.namespace:
|
||||
tty.die(
|
||||
"Can't create package with namespace {0} in repo with "
|
||||
@@ -942,9 +941,7 @@ def get_repository(args, name):
|
||||
)
|
||||
else:
|
||||
if spec.namespace:
|
||||
repo = spack.repo.PATH.get_repo(spec.namespace, None)
|
||||
if not repo:
|
||||
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
|
||||
repo = spack.repo.PATH.get_repo(spec.namespace)
|
||||
else:
|
||||
repo = spack.repo.PATH.first_repo()
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from glob import glob
|
||||
|
||||
@@ -62,9 +63,10 @@ def create_db_tarball(args):
|
||||
|
||||
base = os.path.basename(str(spack.store.STORE.root))
|
||||
transform_args = []
|
||||
# Currently --transform and -s are not supported by Windows native tar
|
||||
if "GNU" in tar("--version", output=str):
|
||||
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
|
||||
else:
|
||||
elif sys.platform != "win32":
|
||||
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
|
||||
|
||||
wd = os.path.dirname(str(spack.store.STORE.root))
|
||||
@@ -90,7 +92,6 @@ def report(args):
|
||||
print("* **Spack:**", get_version())
|
||||
print("* **Python:**", platform.python_version())
|
||||
print("* **Platform:**", architecture)
|
||||
print("* **Concretizer:**", spack.config.get("config:concretizer"))
|
||||
|
||||
|
||||
def debug(parser, args):
|
||||
|
||||
@@ -47,16 +47,6 @@ def inverted_dependencies():
|
||||
dependents of, e.g., `mpi`, but virtuals are not included as
|
||||
actual dependents.
|
||||
"""
|
||||
dag = {}
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
dag.setdefault(pkg_cls.name, set())
|
||||
for dep in pkg_cls.dependencies_by_name():
|
||||
deps = [dep]
|
||||
|
||||
# expand virtuals if necessary
|
||||
if spack.repo.PATH.is_virtual(dep):
|
||||
deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
|
||||
|
||||
dag = collections.defaultdict(set)
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
for _, deps_by_name in pkg_cls.dependencies.items():
|
||||
|
||||
@@ -9,6 +9,8 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.config
|
||||
import spack.fetch_strategy
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.path
|
||||
import spack.version
|
||||
@@ -69,14 +71,15 @@ def _retrieve_develop_source(spec, abspath):
|
||||
# We construct a package class ourselves, rather than asking for
|
||||
# Spec.package, since Spec only allows this when it is concrete
|
||||
package = pkg_cls(spec)
|
||||
if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
|
||||
package.stage[0].fetcher.get_full_repo = True
|
||||
source_stage = package.stage[0]
|
||||
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
|
||||
source_stage.fetcher.get_full_repo = True
|
||||
# If we retrieved this version before and cached it, we may have
|
||||
# done so without cloning the full git repo; likewise, any
|
||||
# mirror might store an instance with truncated history.
|
||||
package.stage[0].disable_mirrors()
|
||||
source_stage.disable_mirrors()
|
||||
|
||||
package.stage[0].fetcher.set_package(package)
|
||||
source_stage.fetcher.set_package(package)
|
||||
package.stage.steal_source(abspath)
|
||||
|
||||
|
||||
|
||||
@@ -123,7 +123,7 @@ def edit(parser, args):
|
||||
spack.util.editor.editor(*paths)
|
||||
elif names:
|
||||
if args.repo:
|
||||
repo = spack.repo.Repo(args.repo)
|
||||
repo = spack.repo.from_path(args.repo)
|
||||
elif args.namespace:
|
||||
repo = spack.repo.PATH.get_repo(args.namespace)
|
||||
else:
|
||||
|
||||
@@ -468,32 +468,30 @@ def env_remove(args):
|
||||
This removes an environment managed by Spack. Directory environments
|
||||
and manifests embedded in repositories should be removed manually.
|
||||
"""
|
||||
read_envs = []
|
||||
remove_envs = []
|
||||
valid_envs = []
|
||||
bad_envs = []
|
||||
invalid_envs = []
|
||||
|
||||
for env_name in ev.all_environment_names():
|
||||
try:
|
||||
env = ev.read(env_name)
|
||||
valid_envs.append(env_name)
|
||||
valid_envs.append(env)
|
||||
|
||||
if env_name in args.rm_env:
|
||||
read_envs.append(env)
|
||||
remove_envs.append(env)
|
||||
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
|
||||
invalid_envs.append(env_name)
|
||||
|
||||
if env_name in args.rm_env:
|
||||
bad_envs.append(env_name)
|
||||
|
||||
# Check if env is linked to another before trying to remove
|
||||
for name in valid_envs:
|
||||
# Check if remove_env is included from another env before trying to remove
|
||||
for env in valid_envs:
|
||||
for remove_env in remove_envs:
|
||||
# don't check if environment is included to itself
|
||||
if name == env_name:
|
||||
if env.name == remove_env.name:
|
||||
continue
|
||||
environ = ev.Environment(ev.root(name))
|
||||
if ev.root(env_name) in environ.included_concrete_envs:
|
||||
msg = f'Environment "{env_name}" is being used by environment "{name}"'
|
||||
|
||||
if remove_env.path in env.included_concrete_envs:
|
||||
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
|
||||
if args.force:
|
||||
tty.warn(msg)
|
||||
else:
|
||||
@@ -506,7 +504,7 @@ def env_remove(args):
|
||||
if not answer:
|
||||
tty.die("Will not remove any environments")
|
||||
|
||||
for env in read_envs:
|
||||
for env in remove_envs:
|
||||
name = env.name
|
||||
if env.active:
|
||||
tty.die(f"Environment {name} can't be removed while activated.")
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Optional
|
||||
from typing import List, Optional, Set
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.colify as colify
|
||||
@@ -19,6 +19,7 @@
|
||||
import spack.detection
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
@@ -138,14 +139,26 @@ def external_find(args):
|
||||
candidate_packages, path_hints=args.path, max_workers=args.jobs
|
||||
)
|
||||
|
||||
new_entries = spack.detection.update_configuration(
|
||||
new_specs = spack.detection.update_configuration(
|
||||
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
||||
)
|
||||
if new_entries:
|
||||
|
||||
# If the user runs `spack external find --not-buildable mpich` we also mark `mpi` non-buildable
|
||||
# to avoid that the concretizer picks a different mpi provider.
|
||||
if new_specs and args.not_buildable:
|
||||
virtuals: Set[str] = {
|
||||
virtual.name
|
||||
for new_spec in new_specs
|
||||
for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
|
||||
for virtual in virtual_specs
|
||||
}
|
||||
new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
|
||||
new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)
|
||||
|
||||
if new_specs:
|
||||
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
|
||||
msg = "The following specs have been detected on this system and added to {0}"
|
||||
tty.msg(msg.format(path))
|
||||
spack.cmd.display_specs(new_entries)
|
||||
tty.msg(f"The following specs have been detected on this system and added to {path}")
|
||||
spack.cmd.display_specs(new_specs)
|
||||
else:
|
||||
tty.msg("No new external packages detected")
|
||||
|
||||
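# Hedged sketch of the new virtuals handling above (package and virtual names are
# hypothetical examples, not taken from this diff): if `spack external find
# --not-buildable mpich` detects mpich, the "mpi" virtual it provides is collected
# into the set passed to set_virtuals_nonbuildable(), so "mpi: buildable: false" is
# written alongside the mpich entry and the concretizer cannot pick another provider.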
|
||||
@@ -46,6 +46,10 @@ def setup_parser(subparser):
help="output specs as machine-readable json records",
)

subparser.add_argument(
"-I", "--install-status", action="store_true", help="show install status of packages"
)

subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
)

@@ -293,25 +297,24 @@ def root_decorator(spec, string):
)
print()

if args.show_concretized:
tty.msg("Concretized roots")
cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
print()

# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results and not args.only_roots:
tty.msg("Installed packages")


def find(parser, args):
q_args = query_arguments(args)
results = args.specs(**q_args)

env = ev.active_environment()

if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")
if not env and args.show_concretized:
tty.die("-c / --show-concretized requires an active environment")

if env:
if args.constraint:
init_specs = spack.cmd.parse_specs(args.constraint)
results = env.all_matching_specs(*init_specs)
else:
results = env.all_specs()
else:
q_args = query_arguments(args)
results = args.specs(**q_args)

decorator = make_env_decorator(env) if env else lambda s, f: f
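`make_env_decorator(env)` supplies the function used to mark environment roots when specs are printed; without an active environment the identity fallback `lambda s, f: f` returns the formatted string unchanged. A rough illustration of that pattern (the decorator below is a stand-in, not Spack's implementation):

def make_env_decorator(roots):
    # Mark specs that are roots of the active environment when displaying them.
    def decorator(spec, formatted):
        return f"[root] {formatted}" if spec in roots else formatted
    return decorator

env_roots = {"zlib"}
decorator = make_env_decorator(env_roots) if env_roots else (lambda s, f: f)
print(decorator("zlib", "zlib@1.3"))     # [root] zlib@1.3
print(decorator("cmake", "cmake@3.27"))  # cmake@3.27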
@@ -332,6 +335,11 @@ def find(parser, args):
if args.loaded:
results = spack.cmd.filter_loaded_specs(results)

if args.install_status or args.show_concretized:
status_fn = spack.spec.Spec.install_status
else:
status_fn = None

# Display the result
if args.json:
cmd.display_specs_as_json(results, deps=args.deps)

@@ -340,12 +348,34 @@ def find(parser, args):
if env:
display_env(env, args, decorator, results)

count_suffix = " (not shown)"
if not args.only_roots:
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""
display_results = results
if not args.show_concretized:
display_results = list(x for x in results if x.installed)
cmd.display_specs(
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
)

# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
installed_suffix = ""
concretized_suffix = " to be installed"

if args.only_roots:
installed_suffix += " (not shown)"
concretized_suffix += " (not shown)"
else:
if env and not args.show_concretized:
concretized_suffix += " (show with `spack find -c`)"

pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
spack.cmd.print_how_many_pkgs(
list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
)

if env:
spack.cmd.print_how_many_pkgs(
list(x for x in results if not x.installed),
"concretized",
suffix=concretized_suffix,
)
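The counting block above reports installed and not-yet-installed (concretized) specs separately, which is why the results are partitioned on `x.installed`. A toy illustration of that split:

from types import SimpleNamespace

results = [
    SimpleNamespace(name="zlib", installed=True),
    SimpleNamespace(name="cmake", installed=True),
    SimpleNamespace(name="hdf5", installed=False),  # concretized but not installed yet
]

installed = [s for s in results if s.installed]
concretized = [s for s in results if not s.installed]
print(f"{len(installed)} installed packages")
print(f"{len(concretized)} concretized packages to be installed")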
@@ -56,7 +56,6 @@ def roots_from_environments(args, active_env):

# -e says "also preserve things needed by this particular env"
for env_name_or_dir in args.except_environment:
print("HMM", env_name_or_dir)
if ev.exists(env_name_or_dir):
env = ev.read(env_name_or_dir)
elif ev.is_env_dir(env_name_or_dir):
@@ -5,10 +5,12 @@

import argparse
import os
import tempfile

import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments

@@ -115,6 +117,7 @@ def setup_parser(subparser):
help="URL of the mirror where keys will be published",
)
publish.add_argument(
"--update-index",
"--rebuild-index",
action="store_true",
default=False,

@@ -220,9 +223,10 @@ def gpg_publish(args):
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

spack.binary_distribution.push_keys(
mirror, keys=args.keys, regenerate_index=args.rebuild_index
)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution.push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)


def gpg(parser, args):
@@ -502,7 +502,7 @@ def print_licenses(pkg, args):

def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg = pkg_cls(spec)

# Output core package information

@@ -169,7 +169,9 @@ def pkg_hash(args):

def get_grep(required=False):
"""Get a grep command to use with ``spack pkg grep``."""
return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
grep = exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
grep.ignore_quotes = True  # allow `spack pkg grep '"quoted string"'` without warning
return grep


def pkg_grep(args, unknown_args):
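`get_grep` now returns the executable object itself so the `ignore_quotes` flag can be set before `spack pkg grep` forwards user arguments. A rough equivalent of the lookup-with-override, using `shutil.which` in place of Spack's `exe.which` wrapper (the function name here is illustrative):

import os
import shutil

def get_grep_path(required=False):
    # Honor a user-provided SPACK_GREP (e.g. ripgrep), falling back to plain grep.
    name = os.environ.get("SPACK_GREP") or "grep"
    path = shutil.which(name)
    if path is None and required:
        raise RuntimeError(f"could not find {name!r} on PATH")
    return path

print(get_grep_path())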
@@ -91,7 +91,7 @@ def repo_add(args):
tty.die("Not a Spack repository: %s" % path)

# Make sure it's actually a spack repository by constructing it.
repo = spack.repo.Repo(canon_path)
repo = spack.repo.from_path(canon_path)

# If that succeeds, finally add it to the configuration.
repos = spack.config.get("repos", scope=args.scope)

@@ -124,7 +124,7 @@ def repo_remove(args):
# If it is a namespace, remove corresponding repo
for path in repos:
try:
repo = spack.repo.Repo(path)
repo = spack.repo.from_path(path)
if repo.namespace == namespace_or_path:
repos.remove(path)
spack.config.set("repos", repos, args.scope)

@@ -142,7 +142,7 @@ def repo_list(args):
repos = []
for r in roots:
try:
repos.append(spack.repo.Repo(r))
repos.append(spack.repo.from_path(r))
except spack.repo.RepoError:
continue
@@ -71,7 +71,7 @@ def unload(parser, args):
"Cannot specify specs on command line when unloading all specs with '--all'"
)

hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
if args.specs:
specs = [
spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
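Splitting the loaded-hashes variable on `os.pathsep` rather than a literal ":" keeps `spack unload` correct on Windows, where the separator is ";". A quick illustration:

import os

# os.pathsep is ":" on Linux/macOS and ";" on Windows.
loaded = os.pathsep.join(["abc123", "def456"])
print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on every platform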
@@ -339,7 +339,7 @@ def add(self, pkg_name, fetcher):
for pkg_cls in spack.repo.PATH.all_package_classes():
npkgs += 1

for v in pkg_cls.versions:
for v in list(pkg_cls.versions):
try:
pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
fetcher = fs.for_package_version(pkg, v)
@@ -23,11 +23,6 @@ def setup_parser(subparser):
|
||||
output.add_argument(
|
||||
"-s", "--safe", action="store_true", help="only list safe versions of the package"
|
||||
)
|
||||
output.add_argument(
|
||||
"--safe-only",
|
||||
action="store_true",
|
||||
help="[deprecated] only list safe versions of the package",
|
||||
)
|
||||
output.add_argument(
|
||||
"-r", "--remote", action="store_true", help="only list remote versions of the package"
|
||||
)
|
||||
@@ -47,17 +42,13 @@ def versions(parser, args):
|
||||
|
||||
safe_versions = pkg.versions
|
||||
|
||||
if args.safe_only:
|
||||
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
|
||||
args.safe = args.safe_only
|
||||
|
||||
if not (args.remote or args.new):
|
||||
if sys.stdout.isatty():
|
||||
tty.msg("Safe versions (already checksummed):")
|
||||
|
||||
if not safe_versions:
|
||||
if sys.stdout.isatty():
|
||||
tty.warn("Found no versions for {0}".format(pkg.name))
|
||||
tty.warn(f"Found no versions for {pkg.name}")
|
||||
tty.debug("Manually add versions to the package.")
|
||||
else:
|
||||
colify(sorted(safe_versions, reverse=True), indent=2)
|
||||
@@ -83,12 +74,12 @@ def versions(parser, args):
|
||||
if not remote_versions:
|
||||
if sys.stdout.isatty():
|
||||
if not fetched_versions:
|
||||
tty.warn("Found no versions for {0}".format(pkg.name))
|
||||
tty.warn(f"Found no versions for {pkg.name}")
|
||||
tty.debug(
|
||||
"Check the list_url and list_depth attributes of "
|
||||
"the package to help Spack find versions."
|
||||
)
|
||||
else:
|
||||
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
|
||||
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
|
||||
else:
|
||||
colify(sorted(remote_versions, reverse=True), indent=2)
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
|
||||
|
||||
import spack.compilers
|
||||
import spack.error
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
@@ -279,11 +278,6 @@ def debug_flags(self):
|
||||
def opt_flags(self):
|
||||
return ["-O", "-O0", "-O1", "-O2", "-O3"]
|
||||
|
||||
# Cray PrgEnv name that can be used to load this compiler
|
||||
PrgEnv: Optional[str] = None
|
||||
# Name of module used to switch versions of this compiler
|
||||
PrgEnv_compiler: Optional[str] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cspec,
|
||||
|
||||
@@ -260,7 +260,7 @@ def _init_compiler_config(
|
||||
def compiler_config_files():
|
||||
config_files = list()
|
||||
config = spack.config.CONFIG
|
||||
for scope in config.file_scopes:
|
||||
for scope in config.writable_scopes:
|
||||
name = scope.name
|
||||
compiler_config = config.get("compilers", scope=name)
|
||||
if compiler_config:
|
||||
@@ -488,7 +488,7 @@ def supported_compilers_for_host_platform() -> List[str]:
|
||||
return supported_compilers_for_platform(host_plat)
|
||||
|
||||
|
||||
def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
|
||||
def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
|
||||
"""Return a set of compiler class objects supported by Spack
|
||||
that are also supported by the provided platform
|
||||
|
||||
|
||||
@@ -25,9 +25,6 @@ class Aocc(Compiler):
|
||||
# Subclasses use possible names of Fortran 90 compiler
|
||||
fc_names = ["flang"]
|
||||
|
||||
PrgEnv = "PrgEnv-aocc"
|
||||
PrgEnv_compiler = "aocc"
|
||||
|
||||
version_argument = "--version"
|
||||
|
||||
@property
|
||||
|
||||
@@ -34,12 +34,9 @@ def __init__(self, *args, **kwargs):
|
||||
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
|
||||
suffixes = [r"-mp-\d\.\d"]
|
||||
|
||||
PrgEnv = "PrgEnv-cray"
|
||||
PrgEnv_compiler = "cce"
|
||||
|
||||
@property
|
||||
def link_paths(self):
|
||||
if any(self.PrgEnv in m for m in self.modules):
|
||||
if any("PrgEnv-cray" in m for m in self.modules):
|
||||
# Old module-based interface to cray compilers
|
||||
return {
|
||||
"cc": os.path.join("cce", "cc"),
|
||||
|
||||
@@ -40,9 +40,6 @@ class Gcc(spack.compiler.Compiler):
|
||||
"fc": os.path.join("gcc", "gfortran"),
|
||||
}
|
||||
|
||||
PrgEnv = "PrgEnv-gnu"
|
||||
PrgEnv_compiler = "gcc"
|
||||
|
||||
@property
|
||||
def verbose_flag(self):
|
||||
return "-v"
|
||||
|
||||
@@ -31,9 +31,6 @@ class Intel(Compiler):
|
||||
"fc": os.path.join("intel", "ifort"),
|
||||
}
|
||||
|
||||
PrgEnv = "PrgEnv-intel"
|
||||
PrgEnv_compiler = "intel"
|
||||
|
||||
if sys.platform == "win32":
|
||||
version_argument = "/QV"
|
||||
else:
|
||||
@@ -126,3 +123,14 @@ def fc_pic_flag(self):
|
||||
@property
|
||||
def stdcxx_libs(self):
|
||||
return ("-cxxlib",)
|
||||
|
||||
def setup_custom_environment(self, pkg, env):
|
||||
# Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
|
||||
# Always pass flags to disable deprecation warnings, since these warnings can
|
||||
# confuse tools that parse the output of compiler commands (e.g. version checks).
|
||||
if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
|
||||
if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
|
||||
if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
|
||||
|
||||
@@ -231,24 +231,55 @@ def msvc_version(self):

@property
def short_msvc_version(self):
"""This is the shorthand VCToolset version of form
MSVC<short-ver>
"""
This is the shorthand VCToolset version of form
MSVC<short-ver> *NOT* the full version, for that see
return "MSVC" + self.vc_toolset_ver

@property
def vc_toolset_ver(self):
"""
The toolset version is the version of the combined set of cl and link
This typically relates directly to VS version i.e. VS 2022 is v143
VS 19 is v142, etc.
This value is defined by the first three digits of the major + minor
version of the VS toolset (143 for 14.3x.bbbbb). Traditionally the
minor version has remained a static two digit number for a VS release
series, however, as of VS22, this is no longer true, both
14.4x.bbbbb and 14.3x.bbbbb are considered valid VS22 VC toolset
versions due to a change in toolset minor version sentiment.

This is *NOT* the full version, for that see
Msvc.msvc_version or MSVC.platform_toolset_ver for the
raw platform toolset version

"""
ver = self.platform_toolset_ver
return "MSVC" + ver
ver = self.msvc_version[:2].joined.string[:3]
return ver

@property
def platform_toolset_ver(self):
"""
This is the platform toolset version of current MSVC compiler
i.e. 142.
i.e. 142. The platform toolset is the targeted MSVC library/compiler
versions by compilation (this is different from the VC Toolset)


This is different from the VC toolset version as established
by `short_msvc_version`
by `short_msvc_version`, but typically are represented by the same
three digit value
"""
return self.msvc_version[:2].joined.string[:3]
# Typically VS toolset version and platform toolset versions match
# VS22 introduces the first divergence of VS toolset version
# (144 for "recent" releases) and platform toolset version (143)
# so it needs additional handling until MS releases v144
# (assuming v144 is also for VS22)
# or adds better support for detection
# TODO: (johnwparent) Update this logic for the next platform toolset
# or VC toolset version update
toolset_ver = self.vc_toolset_ver
vs22_toolset = Version(toolset_ver) > Version("142")
return toolset_ver if not vs22_toolset else "143"

def _compiler_version(self, compiler):
"""Returns version object for given compiler"""
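The docstrings above describe the VC toolset rule ("first three digits of major + minor", e.g. 143 for 14.3x.bbbbb) and the VS 2022 special case where the platform toolset stays at 143 even when the VC toolset reports 144. A purely illustrative sketch of that arithmetic, using plain strings instead of Spack's Version API (helper names and sample versions are not from the diff):

def vc_toolset_ver(msvc_version):
    # First three digits of the joined major + minor version: "14" + "39" -> "143"
    major, minor = msvc_version.split(".")[:2]
    return (major + minor)[:3]

def platform_toolset_ver(msvc_version):
    toolset = vc_toolset_ver(msvc_version)
    # VS 2022 toolsets newer than 142 still target platform toolset v143.
    return "143" if int(toolset) > 142 else toolset

assert vc_toolset_ver("14.29.30133") == "142"      # VS 2019
assert vc_toolset_ver("14.40.33807") == "144"      # VS 2022, newer minor series
assert platform_toolset_ver("14.40.33807") == "143"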
@@ -29,9 +29,6 @@ class Nvhpc(Compiler):
|
||||
"fc": os.path.join("nvhpc", "nvfortran"),
|
||||
}
|
||||
|
||||
PrgEnv = "PrgEnv-nvhpc"
|
||||
PrgEnv_compiler = "nvhpc"
|
||||
|
||||
version_argument = "--version"
|
||||
version_regex = r"nv[^ ]* (?:[^ ]+ Dev-r)?([0-9.]+)(?:-[0-9]+)?"
|
||||
|
||||
|
||||
@@ -4,11 +4,12 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
from os.path import dirname
|
||||
from os.path import dirname, join
|
||||
|
||||
from llnl.util import tty
|
||||
|
||||
from spack.compiler import Compiler
|
||||
from spack.version import Version
|
||||
|
||||
|
||||
class Oneapi(Compiler):
|
||||
@@ -32,9 +33,6 @@ class Oneapi(Compiler):
|
||||
"fc": os.path.join("oneapi", "ifx"),
|
||||
}
|
||||
|
||||
PrgEnv = "PrgEnv-oneapi"
|
||||
PrgEnv_compiler = "oneapi"
|
||||
|
||||
version_argument = "--version"
|
||||
version_regex = r"(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))|(?:\(IFX\))) (\S+)"
|
||||
|
||||
@@ -135,8 +133,22 @@ def setup_custom_environment(self, pkg, env):
|
||||
# It is located in the same directory as the driver. Error message:
|
||||
# clang++: error: unable to execute command:
|
||||
# Executable "sycl-post-link" doesn't exist!
|
||||
if self.cxx:
|
||||
# also ensures that shared objects and libraries required by the compiler,
|
||||
# e.g. libonnx, can be found succesfully
|
||||
# due to a fix, this is no longer required for OneAPI versions >= 2024.2
|
||||
if self.cxx and pkg.spec.satisfies("%oneapi@:2024.1"):
|
||||
env.prepend_path("PATH", dirname(self.cxx))
|
||||
env.prepend_path("LD_LIBRARY_PATH", join(dirname(dirname(self.cxx)), "lib"))
|
||||
|
||||
# Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
|
||||
# Always pass flags to disable deprecation warnings, since these warnings can
|
||||
# confuse tools that parse the output of compiler commands (e.g. version checks).
|
||||
if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
|
||||
if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
|
||||
if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
|
||||
|
||||
# 2024 release bumped the libsycl version because of an ABI
|
||||
# change, 2024 compilers are required. You will see this
|
||||
|
||||
@@ -30,9 +30,6 @@ class Pgi(Compiler):
|
||||
"fc": os.path.join("pgi", "pgfortran"),
|
||||
}
|
||||
|
||||
PrgEnv = "PrgEnv-pgi"
|
||||
PrgEnv_compiler = "pgi"
|
||||
|
||||
version_argument = "-V"
|
||||
ignore_version_errors = [2] # `pgcc -V` on PowerPC annoyingly returns 2
|
||||
version_regex = r"pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on "
|
||||
|
||||
@@ -23,9 +23,6 @@ class Rocmcc(spack.compilers.clang.Clang):
|
||||
# Subclasses use possible names of Fortran 90 compiler
|
||||
fc_names = ["amdflang"]
|
||||
|
||||
PrgEnv = "PrgEnv-amd"
|
||||
PrgEnv_compiler = "amd"
|
||||
|
||||
@property
|
||||
def link_paths(self):
|
||||
link_paths = {
|
||||
|
||||
@@ -2,29 +2,11 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""
|
||||
Functions here are used to take abstract specs and make them concrete.
|
||||
For example, if a spec asks for a version between 1.8 and 1.9, these
|
||||
functions might take will take the most recent 1.9 version of the
|
||||
package available. Or, if the user didn't specify a compiler for a
|
||||
spec, then this will assign a compiler to the spec based on defaults
|
||||
or user preferences.
|
||||
|
||||
TODO: make this customizable and allow users to configure
|
||||
concretization policies.
|
||||
(DEPRECATED) Used to contain the code for the original concretizer
|
||||
"""
|
||||
import functools
|
||||
import platform
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from itertools import chain
|
||||
from typing import Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.abi
|
||||
import spack.compilers
|
||||
@@ -37,639 +19,20 @@
|
||||
import spack.target
|
||||
import spack.tengine
|
||||
import spack.util.path
|
||||
import spack.variant as vt
|
||||
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
|
||||
from spack.version import ClosedOpenRange, VersionList, ver
|
||||
|
||||
#: impements rudimentary logic for ABI compatibility
|
||||
_abi: Union[spack.abi.ABI, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
|
||||
lambda: spack.abi.ABI()
|
||||
)
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class reverse_order:
|
||||
"""Helper for creating key functions.
|
||||
|
||||
This is a wrapper that inverts the sense of the natural
|
||||
comparisons on the object.
|
||||
"""
|
||||
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return other.value == self.value
|
||||
|
||||
def __lt__(self, other):
|
||||
return other.value < self.value
|
||||
|
||||
|
||||
class Concretizer:
|
||||
"""You can subclass this class to override some of the default
|
||||
concretization strategies, or you can override all of them.
|
||||
"""
|
||||
"""(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""
|
||||
|
||||
#: Controls whether we check that compiler versions actually exist
|
||||
#: during concretization. Used for testing and for mirror creation
|
||||
check_for_compiler_existence = None
|
||||
|
||||
#: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
|
||||
#: Those will not be considered as providers for virtuals.
|
||||
non_buildable_packages = {"glibc", "musl"}
|
||||
|
||||
def __init__(self, abstract_spec=None):
|
||||
def __init__(self):
|
||||
if Concretizer.check_for_compiler_existence is None:
|
||||
Concretizer.check_for_compiler_existence = not spack.config.get(
|
||||
"config:install_missing_compilers", False
|
||||
)
|
||||
self.abstract_spec = abstract_spec
|
||||
self._adjust_target_answer_generator = None
|
||||
|
||||
def concretize_develop(self, spec):
|
||||
"""
|
||||
Add ``dev_path=*`` variant to packages built from local source.
|
||||
"""
|
||||
env = spack.environment.active_environment()
|
||||
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
|
||||
if not dev_info:
|
||||
return False
|
||||
|
||||
path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
|
||||
|
||||
if "dev_path" in spec.variants:
|
||||
assert spec.variants["dev_path"].value == path
|
||||
changed = False
|
||||
else:
|
||||
spec.variants.setdefault("dev_path", vt.SingleValuedVariant("dev_path", path))
|
||||
changed = True
|
||||
changed |= spec.constrain(dev_info["spec"])
|
||||
return changed
|
||||
|
||||
def _valid_virtuals_and_externals(self, spec):
|
||||
"""Returns a list of candidate virtual dep providers and external
|
||||
packages that coiuld be used to concretize a spec.
|
||||
|
||||
Preferred specs come first in the list.
|
||||
"""
|
||||
# First construct a list of concrete candidates to replace spec with.
|
||||
candidates = [spec]
|
||||
pref_key = lambda spec: 0 # no-op pref key
|
||||
|
||||
if spec.virtual:
|
||||
candidates = [
|
||||
s
|
||||
for s in spack.repo.PATH.providers_for(spec)
|
||||
if s.name not in self.non_buildable_packages
|
||||
]
|
||||
if not candidates:
|
||||
raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
|
||||
|
||||
# Find nearest spec in the DAG (up then down) that has prefs.
|
||||
spec_w_prefs = find_spec(
|
||||
spec, lambda p: PackagePrefs.has_preferred_providers(p.name, spec.name), spec
|
||||
) # default to spec itself.
|
||||
|
||||
# Create a key to sort candidates by the prefs we found
|
||||
pref_key = PackagePrefs(spec_w_prefs.name, "providers", spec.name)
|
||||
|
||||
# For each candidate package, if it has externals, add those
|
||||
# to the usable list. if it's not buildable, then *only* add
|
||||
# the externals.
|
||||
usable = []
|
||||
for cspec in candidates:
|
||||
if is_spec_buildable(cspec):
|
||||
usable.append(cspec)
|
||||
|
||||
externals = spec_externals(cspec)
|
||||
for ext in externals:
|
||||
if ext.intersects(spec):
|
||||
usable.append(ext)
|
||||
|
||||
# If nothing is in the usable list now, it's because we aren't
|
||||
# allowed to build anything.
|
||||
if not usable:
|
||||
raise NoBuildError(spec)
|
||||
|
||||
# Use a sort key to order the results
|
||||
return sorted(
|
||||
usable,
|
||||
key=lambda spec: (
|
||||
not spec.external, # prefer externals
|
||||
pref_key(spec), # respect prefs
|
||||
spec.name, # group by name
|
||||
reverse_order(spec.versions), # latest version
|
||||
spec, # natural order
|
||||
),
|
||||
)
|
||||
|
||||
def choose_virtual_or_external(self, spec: spack.spec.Spec):
|
||||
"""Given a list of candidate virtual and external packages, try to
|
||||
find one that is most ABI compatible.
|
||||
"""
|
||||
candidates = self._valid_virtuals_and_externals(spec)
|
||||
if not candidates:
|
||||
return candidates
|
||||
|
||||
# Find the nearest spec in the dag that has a compiler. We'll
|
||||
# use that spec to calibrate compiler compatibility.
|
||||
abi_exemplar = find_spec(spec, lambda x: x.compiler)
|
||||
if abi_exemplar is None:
|
||||
abi_exemplar = spec.root
|
||||
|
||||
# Sort candidates from most to least compatibility.
|
||||
# We reverse because True > False.
|
||||
# Sort is stable, so candidates keep their order.
|
||||
return sorted(
|
||||
candidates,
|
||||
reverse=True,
|
||||
key=lambda spec: (
|
||||
_abi.compatible(spec, abi_exemplar, loose=True),
|
||||
_abi.compatible(spec, abi_exemplar),
|
||||
),
|
||||
)
|
||||
|
||||
def concretize_version(self, spec):
|
||||
"""If the spec is already concrete, return. Otherwise take
|
||||
the preferred version from spackconfig, and default to the package's
|
||||
version if there are no available versions.
|
||||
|
||||
TODO: In many cases we probably want to look for installed
|
||||
versions of each package and use an installed version
|
||||
if we can link to it. The policy implemented here will
|
||||
tend to rebuild a lot of stuff becasue it will prefer
|
||||
a compiler in the spec to any compiler already-
|
||||
installed things were built with. There is likely
|
||||
some better policy that finds some middle ground
|
||||
between these two extremes.
|
||||
"""
|
||||
# return if already concrete.
|
||||
if spec.versions.concrete:
|
||||
return False
|
||||
|
||||
# List of versions we could consider, in sorted order
|
||||
pkg_versions = spec.package_class.versions
|
||||
usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]
|
||||
|
||||
yaml_prefs = PackagePrefs(spec.name, "version")
|
||||
|
||||
# The keys below show the order of precedence of factors used
|
||||
# to select a version when concretizing. The item with
|
||||
# the "largest" key will be selected.
|
||||
#
|
||||
# NOTE: When COMPARING VERSIONS, the '@develop' version is always
|
||||
# larger than other versions. BUT when CONCRETIZING,
|
||||
# the largest NON-develop version is selected by default.
|
||||
keyfn = lambda v: (
|
||||
# ------- Special direction from the user
|
||||
# Respect order listed in packages.yaml
|
||||
-yaml_prefs(v),
|
||||
# The preferred=True flag (packages or packages.yaml or both?)
|
||||
pkg_versions.get(v).get("preferred", False),
|
||||
# ------- Regular case: use latest non-develop version by default.
|
||||
# Avoid @develop version, which would otherwise be the "largest"
|
||||
# in straight version comparisons
|
||||
not v.isdevelop(),
|
||||
# Compare the version itself
|
||||
# This includes the logic:
|
||||
# a) develop > everything (disabled by "not v.isdevelop() above)
|
||||
# b) numeric > non-numeric
|
||||
# c) Numeric or string comparison
|
||||
v,
|
||||
)
|
||||
usable.sort(key=keyfn, reverse=True)
|
||||
|
||||
if usable:
|
||||
spec.versions = ver([usable[0]])
|
||||
else:
|
||||
# We don't know of any SAFE versions that match the given
|
||||
# spec. Grab the spec's versions and grab the highest
|
||||
# *non-open* part of the range of versions it specifies.
|
||||
# Someone else can raise an error if this happens,
|
||||
# e.g. when we go to fetch it and don't know how. But it
|
||||
# *might* work.
|
||||
if not spec.versions or spec.versions == VersionList([":"]):
|
||||
raise NoValidVersionError(spec)
|
||||
else:
|
||||
last = spec.versions[-1]
|
||||
if isinstance(last, ClosedOpenRange):
|
||||
range_as_version = VersionList([last]).concrete_range_as_version
|
||||
if range_as_version:
|
||||
spec.versions = ver([range_as_version])
|
||||
else:
|
||||
raise NoValidVersionError(spec)
|
||||
else:
|
||||
spec.versions = ver([last])
|
||||
|
||||
return True # Things changed
|
||||
|
||||
def concretize_architecture(self, spec):
|
||||
"""If the spec is empty provide the defaults of the platform. If the
|
||||
architecture is not a string type, then check if either the platform,
|
||||
target or operating system are concretized. If any of the fields are
|
||||
changed then return True. If everything is concretized (i.e the
|
||||
architecture attribute is a namedtuple of classes) then return False.
|
||||
If the target is a string type, then convert the string into a
|
||||
concretized architecture. If it has no architecture and the root of the
|
||||
DAG has an architecture, then use the root otherwise use the defaults
|
||||
on the platform.
|
||||
"""
|
||||
# ensure type safety for the architecture
|
||||
if spec.architecture is None:
|
||||
spec.architecture = spack.spec.ArchSpec()
|
||||
|
||||
if spec.architecture.concrete:
|
||||
return False
|
||||
|
||||
# Get platform of nearest spec with a platform, including spec
|
||||
# If spec has a platform, easy
|
||||
if spec.architecture.platform:
|
||||
new_plat = spack.platforms.by_name(spec.architecture.platform)
|
||||
else:
|
||||
# Else if anyone else has a platform, take the closest one
|
||||
# Search up, then down, along build/link deps first
|
||||
# Then any nearest. Algorithm from compilerspec search
|
||||
platform_spec = find_spec(spec, lambda x: x.architecture and x.architecture.platform)
|
||||
if platform_spec:
|
||||
new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
|
||||
else:
|
||||
# If no platform anywhere in this spec, grab the default
|
||||
new_plat = spack.platforms.host()
|
||||
|
||||
# Get nearest spec with relevant platform and an os
|
||||
# Generally, same algorithm as finding platform, except we only
|
||||
# consider specs that have a platform
|
||||
if spec.architecture.os:
|
||||
new_os = spec.architecture.os
|
||||
else:
|
||||
new_os_spec = find_spec(
|
||||
spec,
|
||||
lambda x: (
|
||||
x.architecture
|
||||
and x.architecture.platform == str(new_plat)
|
||||
and x.architecture.os
|
||||
),
|
||||
)
|
||||
if new_os_spec:
|
||||
new_os = new_os_spec.architecture.os
|
||||
else:
|
||||
new_os = new_plat.operating_system("default_os")
|
||||
|
||||
# Get the nearest spec with relevant platform and a target
|
||||
# Generally, same algorithm as finding os
|
||||
curr_target = None
|
||||
if spec.architecture.target:
|
||||
curr_target = spec.architecture.target
|
||||
if spec.architecture.target and spec.architecture.target_concrete:
|
||||
new_target = spec.architecture.target
|
||||
else:
|
||||
new_target_spec = find_spec(
|
||||
spec,
|
||||
lambda x: (
|
||||
x.architecture
|
||||
and x.architecture.platform == str(new_plat)
|
||||
and x.architecture.target
|
||||
and x.architecture.target != curr_target
|
||||
),
|
||||
)
|
||||
if new_target_spec:
|
||||
if curr_target:
|
||||
# constrain one target by the other
|
||||
new_target_arch = spack.spec.ArchSpec(
|
||||
(None, None, new_target_spec.architecture.target)
|
||||
)
|
||||
curr_target_arch = spack.spec.ArchSpec((None, None, curr_target))
|
||||
curr_target_arch.constrain(new_target_arch)
|
||||
new_target = curr_target_arch.target
|
||||
else:
|
||||
new_target = new_target_spec.architecture.target
|
||||
else:
|
||||
# To get default platform, consider package prefs
|
||||
if PackagePrefs.has_preferred_targets(spec.name):
|
||||
new_target = self.target_from_package_preferences(spec)
|
||||
else:
|
||||
new_target = new_plat.target("default_target")
|
||||
if curr_target:
|
||||
# convert to ArchSpec to compare satisfaction
|
||||
new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
|
||||
curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))
|
||||
|
||||
if not new_target_arch.intersects(curr_target_arch):
|
||||
# new_target is an incorrect guess based on preferences
|
||||
# and/or default
|
||||
valid_target_ranges = str(curr_target).split(",")
|
||||
for target_range in valid_target_ranges:
|
||||
t_min, t_sep, t_max = target_range.partition(":")
|
||||
if not t_sep:
|
||||
new_target = t_min
|
||||
break
|
||||
elif t_max:
|
||||
new_target = t_max
|
||||
break
|
||||
elif t_min:
|
||||
# TODO: something better than picking first
|
||||
new_target = t_min
|
||||
break
|
||||
|
||||
# Construct new architecture, compute whether spec changed
|
||||
arch_spec = (str(new_plat), str(new_os), str(new_target))
|
||||
new_arch = spack.spec.ArchSpec(arch_spec)
|
||||
spec_changed = new_arch != spec.architecture
|
||||
spec.architecture = new_arch
|
||||
return spec_changed
|
||||
|
||||
def target_from_package_preferences(self, spec):
|
||||
"""Returns the preferred target from the package preferences if
|
||||
there's any.
|
||||
|
||||
Args:
|
||||
spec: abstract spec to be concretized
|
||||
"""
|
||||
target_prefs = PackagePrefs(spec.name, "target")
|
||||
target_specs = [spack.spec.Spec("target=%s" % tname) for tname in archspec.cpu.TARGETS]
|
||||
|
||||
def tspec_filter(s):
|
||||
# Filter target specs by whether the architecture
|
||||
# family is the current machine type. This ensures
|
||||
# we only consider x86_64 targets when on an
|
||||
# x86_64 machine, etc. This may need to change to
|
||||
# enable setting cross compiling as a default
|
||||
target = archspec.cpu.TARGETS[str(s.architecture.target)]
|
||||
arch_family_name = target.family.name
|
||||
return arch_family_name == platform.machine()
|
||||
|
||||
# Sort filtered targets by package prefs
|
||||
target_specs = list(filter(tspec_filter, target_specs))
|
||||
target_specs.sort(key=target_prefs)
|
||||
new_target = target_specs[0].architecture.target
|
||||
return new_target
|
||||
|
||||
def concretize_variants(self, spec):
|
||||
"""If the spec already has variants filled in, return. Otherwise, add
|
||||
the user preferences from packages.yaml or the default variants from
|
||||
the package specification.
|
||||
"""
|
||||
changed = False
|
||||
preferred_variants = PackagePrefs.preferred_variants(spec.name)
|
||||
pkg_cls = spec.package_class
|
||||
for name, entry in pkg_cls.variants.items():
|
||||
variant, when = entry
|
||||
var = spec.variants.get(name, None)
|
||||
if var and "*" in var:
|
||||
# remove variant wildcard before concretizing
|
||||
# wildcard cannot be combined with other variables in a
|
||||
# multivalue variant, a concrete variant cannot have the value
|
||||
# wildcard, and a wildcard does not constrain a variant
|
||||
spec.variants.pop(name)
|
||||
if name not in spec.variants and any(spec.satisfies(w) for w in when):
|
||||
changed = True
|
||||
if name in preferred_variants:
|
||||
spec.variants[name] = preferred_variants.get(name)
|
||||
else:
|
||||
spec.variants[name] = variant.make_default()
|
||||
if name in spec.variants and not any(spec.satisfies(w) for w in when):
|
||||
raise vt.InvalidVariantForSpecError(name, when, spec)
|
||||
|
||||
return changed
|
||||
|
||||
def concretize_compiler(self, spec):
|
||||
"""If the spec already has a compiler, we're done. If not, then take
|
||||
the compiler used for the nearest ancestor with a compiler
|
||||
spec and use that. If the ancestor's compiler is not
|
||||
concrete, then used the preferred compiler as specified in
|
||||
spackconfig.
|
||||
|
||||
Intuition: Use the spackconfig default if no package that depends on
|
||||
this one has a strict compiler requirement. Otherwise, try to
|
||||
build with the compiler that will be used by libraries that
|
||||
link to this one, to maximize compatibility.
|
||||
"""
|
||||
# Pass on concretizing the compiler if the target or operating system
|
||||
# is not yet determined
|
||||
if not spec.architecture.concrete:
|
||||
# We haven't changed, but other changes need to happen before we
|
||||
# continue. `return True` here to force concretization to keep
|
||||
# running.
|
||||
return True
|
||||
|
||||
# Only use a matching compiler if it is of the proper style
|
||||
# Takes advantage of the proper logic already existing in
|
||||
# compiler_for_spec Should think whether this can be more
|
||||
# efficient
|
||||
def _proper_compiler_style(cspec, aspec):
|
||||
compilers = spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
|
||||
# If the spec passed as argument is concrete we want to check
|
||||
# the versions match exactly
|
||||
if (
|
||||
cspec.concrete
|
||||
and compilers
|
||||
and cspec.version not in [c.version for c in compilers]
|
||||
):
|
||||
return []
|
||||
|
||||
return compilers
|
||||
|
||||
if spec.compiler and spec.compiler.concrete:
|
||||
if self.check_for_compiler_existence and not _proper_compiler_style(
|
||||
spec.compiler, spec.architecture
|
||||
):
|
||||
_compiler_concretization_failure(spec.compiler, spec.architecture)
|
||||
return False
|
||||
|
||||
# Find another spec that has a compiler, or the root if none do
|
||||
other_spec = spec if spec.compiler else find_spec(spec, lambda x: x.compiler, spec.root)
|
||||
other_compiler = other_spec.compiler
|
||||
assert other_spec
|
||||
|
||||
# Check if the compiler is already fully specified
|
||||
if other_compiler and other_compiler.concrete:
|
||||
if self.check_for_compiler_existence and not _proper_compiler_style(
|
||||
other_compiler, spec.architecture
|
||||
):
|
||||
_compiler_concretization_failure(other_compiler, spec.architecture)
|
||||
spec.compiler = other_compiler
|
||||
return True
|
||||
|
||||
if other_compiler: # Another node has abstract compiler information
|
||||
compiler_list = spack.compilers.find_specs_by_arch(other_compiler, spec.architecture)
|
||||
if not compiler_list:
|
||||
# We don't have a matching compiler installed
|
||||
if not self.check_for_compiler_existence:
|
||||
# Concretize compiler spec versions as a package to build
|
||||
cpkg_spec = spack.compilers.pkg_spec_for_compiler(other_compiler)
|
||||
self.concretize_version(cpkg_spec)
|
||||
spec.compiler = spack.spec.CompilerSpec(
|
||||
other_compiler.name, cpkg_spec.versions
|
||||
)
|
||||
return True
|
||||
else:
|
||||
# No compiler with a satisfactory spec was found
|
||||
raise UnavailableCompilerVersionError(other_compiler, spec.architecture)
|
||||
else:
|
||||
# We have no hints to go by, grab any compiler
|
||||
compiler_list = spack.compilers.all_compiler_specs()
|
||||
if not compiler_list:
|
||||
# Spack has no compilers.
|
||||
raise spack.compilers.NoCompilersError()
|
||||
|
||||
# By default, prefer later versions of compilers
|
||||
compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
|
||||
ppk = PackagePrefs(other_spec.name, "compiler")
|
||||
matches = sorted(compiler_list, key=ppk)
|
||||
|
||||
# copy concrete version into other_compiler
|
||||
try:
|
||||
spec.compiler = next(
|
||||
c for c in matches if _proper_compiler_style(c, spec.architecture)
|
||||
).copy()
|
||||
except StopIteration:
|
||||
# No compiler with a satisfactory spec has a suitable arch
|
||||
_compiler_concretization_failure(other_compiler, spec.architecture)
|
||||
|
||||
assert spec.compiler.concrete
|
||||
return True # things changed.
|
||||
|
||||
def concretize_compiler_flags(self, spec):
|
||||
"""
|
||||
The compiler flags are updated to match those of the spec whose
|
||||
compiler is used, defaulting to no compiler flags in the spec.
|
||||
Default specs set at the compiler level will still be added later.
|
||||
"""
|
||||
# Pass on concretizing the compiler flags if the target or operating
|
||||
# system is not set.
|
||||
if not spec.architecture.concrete:
|
||||
# We haven't changed, but other changes need to happen before we
|
||||
# continue. `return True` here to force concretization to keep
|
||||
# running.
|
||||
return True
|
||||
|
||||
compiler_match = lambda other: (
|
||||
spec.compiler == other.compiler and spec.architecture == other.architecture
|
||||
)
|
||||
|
||||
ret = False
|
||||
for flag in spack.spec.FlagMap.valid_compiler_flags():
|
||||
if flag not in spec.compiler_flags:
|
||||
spec.compiler_flags[flag] = list()
|
||||
try:
|
||||
nearest = next(
|
||||
p
|
||||
for p in spec.traverse(direction="parents")
|
||||
if (compiler_match(p) and (p is not spec) and flag in p.compiler_flags)
|
||||
)
|
||||
nearest_flags = nearest.compiler_flags.get(flag, [])
|
||||
flags = spec.compiler_flags.get(flag, [])
|
||||
if set(nearest_flags) - set(flags):
|
||||
spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(nearest_flags + flags))
|
||||
ret = True
|
||||
except StopIteration:
|
||||
pass
|
||||
|
||||
# Include the compiler flag defaults from the config files
|
||||
# This ensures that spack will detect conflicts that stem from a change
|
||||
# in default compiler flags.
|
||||
try:
|
||||
compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
|
||||
except spack.compilers.NoCompilerForSpecError:
|
||||
if self.check_for_compiler_existence:
|
||||
raise
|
||||
return ret
|
||||
for flag in compiler.flags:
|
||||
config_flags = compiler.flags.get(flag, [])
|
||||
flags = spec.compiler_flags.get(flag, [])
|
||||
spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(config_flags + flags))
|
||||
if set(config_flags) - set(flags):
|
||||
ret = True
|
||||
|
||||
return ret
|
||||
|
||||
def adjust_target(self, spec):
|
||||
"""Adjusts the target microarchitecture if the compiler is too old
|
||||
to support the default one.
|
||||
|
||||
Args:
|
||||
spec: spec to be concretized
|
||||
|
||||
Returns:
|
||||
True if spec was modified, False otherwise
|
||||
"""
|
||||
# To minimize the impact on performance this function will attempt
|
||||
# to adjust the target only at the very first call once necessary
|
||||
# information is set. It will just return False on subsequent calls.
|
||||
# The way this is achieved is by initializing a generator and making
|
||||
# this function return the next answer.
|
||||
if not (spec.architecture and spec.architecture.concrete):
|
||||
# Not ready, but keep going because we have work to do later
|
||||
return True
|
||||
|
||||
def _make_only_one_call(spec):
|
||||
yield self._adjust_target(spec)
|
||||
while True:
|
||||
yield False
|
||||
|
||||
if self._adjust_target_answer_generator is None:
|
||||
self._adjust_target_answer_generator = _make_only_one_call(spec)
|
||||
|
||||
return next(self._adjust_target_answer_generator)
|
||||
|
||||
def _adjust_target(self, spec):
|
||||
"""Assumes that the architecture and the compiler have been
|
||||
set already and checks if the current target microarchitecture
|
||||
is the default and can be optimized by the compiler.
|
||||
|
||||
If not, downgrades the microarchitecture until a suitable one
|
||||
is found. If none can be found raise an error.
|
||||
|
||||
Args:
|
||||
spec: spec to be concretized
|
||||
|
||||
Returns:
|
||||
True if any modification happened, False otherwise
|
||||
"""
|
||||
import archspec.cpu
|
||||
|
||||
# Try to adjust the target only if it is the default
|
||||
# target for this platform
|
||||
current_target = spec.architecture.target
|
||||
current_platform = spack.platforms.by_name(spec.architecture.platform)
|
||||
|
||||
default_target = current_platform.target("default_target")
|
||||
if PackagePrefs.has_preferred_targets(spec.name):
|
||||
default_target = self.target_from_package_preferences(spec)
|
||||
|
||||
if current_target != default_target or (
|
||||
self.abstract_spec
|
||||
and self.abstract_spec.architecture
|
||||
and self.abstract_spec.architecture.concrete
|
||||
):
|
||||
return False
|
||||
|
||||
try:
|
||||
current_target.optimization_flags(spec.compiler)
|
||||
except archspec.cpu.UnsupportedMicroarchitecture:
|
||||
microarchitecture = current_target.microarchitecture
|
||||
for ancestor in microarchitecture.ancestors:
|
||||
candidate = None
|
||||
try:
|
||||
candidate = spack.target.Target(ancestor)
|
||||
candidate.optimization_flags(spec.compiler)
|
||||
except archspec.cpu.UnsupportedMicroarchitecture:
|
||||
continue
|
||||
|
||||
if candidate is not None:
|
||||
msg = (
|
||||
"{0.name}@{0.version} cannot build optimized "
|
||||
'binaries for "{1}". Using best target possible: '
|
||||
'"{2}"'
|
||||
)
|
||||
msg = msg.format(spec.compiler, current_target, candidate)
|
||||
tty.warn(msg)
|
||||
spec.architecture.target = candidate
|
||||
return True
|
||||
else:
|
||||
raise
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@contextmanager
|
||||
@@ -719,19 +82,6 @@ def find_spec(spec, condition, default=None):
|
||||
return default # Nothing matched the condition; return default.
|
||||
|
||||
|
||||
def _compiler_concretization_failure(compiler_spec, arch):
|
||||
# Distinguish between the case that there are compilers for
|
||||
# the arch but not with the given compiler spec and the case that
|
||||
# there are no compilers for the arch at all
|
||||
if not spack.compilers.compilers_for_arch(arch):
|
||||
available_os_targets = set(
|
||||
(c.operating_system, c.target) for c in spack.compilers.all_compilers()
|
||||
)
|
||||
raise NoCompilersForArchError(arch, available_os_targets)
|
||||
else:
|
||||
raise UnavailableCompilerVersionError(compiler_spec, arch)
|
||||
|
||||
|
||||
def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
@@ -744,12 +94,6 @@ def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
Returns:
|
||||
List of concretized specs
|
||||
"""
|
||||
if spack.config.get("config:concretizer", "clingo") == "original":
|
||||
return _concretize_specs_together_original(*abstract_specs, **kwargs)
|
||||
return _concretize_specs_together_new(*abstract_specs, **kwargs)
|
||||
|
||||
|
||||
def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
import spack.solver.asp
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
@@ -760,51 +104,6 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
def _concretize_specs_together_original(*abstract_specs, **kwargs):
|
||||
abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir)
|
||||
# Split recursive specs, as it seems the concretizer has issue
|
||||
# respecting conditions on dependents expressed like
|
||||
# depends_on('foo ^bar@1.0'), see issue #11160
|
||||
split_specs = [
|
||||
dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
|
||||
]
|
||||
builder.add_package(
|
||||
"concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
|
||||
)
|
||||
|
||||
with spack.repo.use_repositories(builder.root, override=False):
|
||||
# Spec from a helper package that depends on all the abstract_specs
|
||||
concretization_root = spack.spec.Spec("concretizationroot")
|
||||
concretization_root.concretize(tests=kwargs.get("tests", False))
|
||||
# Retrieve the direct dependencies
|
||||
concrete_specs = [concretization_root[spec.name].copy() for spec in abstract_specs]
|
||||
|
||||
return concrete_specs
|
||||
|
||||
|
||||
class NoCompilersForArchError(spack.error.SpackError):
|
||||
def __init__(self, arch, available_os_targets):
|
||||
err_msg = (
|
||||
"No compilers found"
|
||||
" for operating system %s and target %s."
|
||||
"\nIf previous installations have succeeded, the"
|
||||
" operating system may have been updated." % (arch.os, arch.target)
|
||||
)
|
||||
|
||||
available_os_target_strs = list()
|
||||
for operating_system, t in available_os_targets:
|
||||
os_target_str = "%s-%s" % (operating_system, t) if t else operating_system
|
||||
available_os_target_strs.append(os_target_str)
|
||||
err_msg += (
|
||||
"\nCompilers are defined for the following"
|
||||
" operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
|
||||
)
|
||||
|
||||
super().__init__(err_msg, "Run 'spack compiler find' to add compilers.")
|
||||
|
||||
|
||||
class UnavailableCompilerVersionError(spack.error.SpackError):
|
||||
"""Raised when there is no available compiler that satisfies a
|
||||
compiler spec."""
|
||||
@@ -820,37 +119,3 @@ def __init__(self, compiler_spec, arch=None):
|
||||
"'spack compilers' to see which compilers are already recognized"
|
||||
" by spack.",
|
||||
)
|
||||
|
||||
|
||||
class NoValidVersionError(spack.error.SpackError):
|
||||
"""Raised when there is no way to have a concrete version for a
|
||||
particular spec."""
|
||||
|
||||
def __init__(self, spec):
|
||||
super().__init__(
|
||||
"There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
|
||||
)
|
||||
|
||||
|
||||
class InsufficientArchitectureInfoError(spack.error.SpackError):
|
||||
"""Raised when details on architecture cannot be collected from the
|
||||
system"""
|
||||
|
||||
def __init__(self, spec, archs):
|
||||
super().__init__(
|
||||
"Cannot determine necessary architecture information for '%s': %s"
|
||||
% (spec.name, str(archs))
|
||||
)
|
||||
|
||||
|
||||
class NoBuildError(spack.error.SpecError):
|
||||
"""Raised when a package is configured with the buildable option False, but
|
||||
no satisfactory external versions can be found
|
||||
"""
|
||||
|
||||
def __init__(self, spec):
|
||||
msg = (
|
||||
"The spec\n '%s'\n is configured as not buildable, "
|
||||
"and no matching external installs were found"
|
||||
)
|
||||
super().__init__(msg % spec)
|
||||
|
||||
@@ -35,11 +35,10 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union
|
||||
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
|
||||
|
||||
from llnl.util import filesystem, lang, tty
|
||||
|
||||
import spack.compilers
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.schema
|
||||
@@ -54,6 +53,7 @@
|
||||
import spack.schema.modules
|
||||
import spack.schema.packages
|
||||
import spack.schema.repos
|
||||
import spack.schema.splice
|
||||
import spack.schema.upstreams
|
||||
|
||||
# Hacked yaml for configuration files preserves line numbers.
|
||||
@@ -78,6 +78,7 @@
|
||||
"bootstrap": spack.schema.bootstrap.schema,
|
||||
"ci": spack.schema.ci.schema,
|
||||
"cdash": spack.schema.cdash.schema,
|
||||
"splice": spack.schema.splice.schema,
|
||||
}
|
||||
|
||||
# Same as above, but including keys for environments
|
||||
@@ -100,7 +101,6 @@
|
||||
"dirty": False,
|
||||
"build_jobs": min(16, cpus_available()),
|
||||
"build_stage": "$tempdir/spack-stage",
|
||||
"concretizer": "clingo",
|
||||
"license_dir": spack.paths.default_license_dir,
|
||||
}
|
||||
}
|
||||
@@ -117,21 +117,39 @@


class ConfigScope:
"""This class represents a configuration scope.
def __init__(self, name: str) -> None:
self.name = name
self.writable = False
self.sections = syaml.syaml_dict()

A scope is one directory containing named configuration files.
Each file is a config "section" (e.g., mirrors, compilers, etc.).
"""
def get_section_filename(self, section: str) -> str:
raise NotImplementedError

def __init__(self, name, path) -> None:
self.name = name  # scope name.
self.path = path  # path to directory containing configs.
self.sections = syaml.syaml_dict()  # sections read from config files.
def get_section(self, section: str) -> Optional[YamlConfigDict]:
raise NotImplementedError

def _write_section(self, section: str) -> None:
raise NotImplementedError

@property
def is_platform_dependent(self) -> bool:
"""Returns true if the scope name is platform specific"""
return os.sep in self.name
return False

def clear(self) -> None:
"""Empty cached config information."""
self.sections = syaml.syaml_dict()

def __repr__(self) -> str:
return f"<ConfigScope: {self.name}>"


class DirectoryConfigScope(ConfigScope):
"""Config scope backed by a directory containing one file per section."""

def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
super().__init__(name)
self.path = path
self.writable = writable

def get_section_filename(self, section: str) -> str:
"""Returns the filename associated with a given section"""
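With this refactor `ConfigScope` becomes a small abstract base (name, `writable`, cached `sections`, and `NotImplementedError` stubs), and concrete storage moves into subclasses such as `DirectoryConfigScope` and `SingleFileScope`. A hedged sketch of what a minimal scope could look like under that contract; the subclass and data below are illustrative, and a plain dict stands in for `syaml.syaml_dict`:

class ConfigScope:
    # Abstract scope: holds the name, a writability flag and cached sections.
    def __init__(self, name):
        self.name = name
        self.writable = False
        self.sections = {}

    def get_section(self, section):
        raise NotImplementedError

class InMemoryScope(ConfigScope):
    # Hypothetical read-only scope backed by a dict, for illustration only.
    def __init__(self, name, data):
        super().__init__(name)
        self.sections = dict(data)

    def get_section(self, section):
        return self.sections.get(section)

scope = InMemoryScope("defaults", {"config": {"config": {"build_jobs": 8}}})
print(scope.get_section("config"))  # {'config': {'build_jobs': 8}}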
@@ -148,14 +166,15 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
||||
return self.sections[section]
|
||||
|
||||
def _write_section(self, section: str) -> None:
|
||||
if not self.writable:
|
||||
raise ConfigError(f"Cannot write to immutable scope {self}")
|
||||
|
||||
filename = self.get_section_filename(section)
|
||||
data = self.get_section(section)
|
||||
if data is None:
|
||||
return
|
||||
|
||||
# We copy data here to avoid adding defaults at write time
|
||||
validate_data = copy.deepcopy(data)
|
||||
validate(validate_data, SECTION_SCHEMAS[section])
|
||||
validate(data, SECTION_SCHEMAS[section])
|
||||
|
||||
try:
|
||||
filesystem.mkdirp(self.path)
|
||||
@@ -164,19 +183,23 @@ def _write_section(self, section: str) -> None:
|
||||
except (syaml.SpackYAMLError, OSError) as e:
|
||||
raise ConfigFileError(f"cannot write to '{filename}'") from e
|
||||
|
||||
def clear(self) -> None:
|
||||
"""Empty cached config information."""
|
||||
self.sections = syaml.syaml_dict()
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<ConfigScope: {self.name}: {self.path}>"
|
||||
@property
|
||||
def is_platform_dependent(self) -> bool:
|
||||
"""Returns true if the scope name is platform specific"""
|
||||
return "/" in self.name
|
||||
|
||||
|
||||
class SingleFileScope(ConfigScope):
|
||||
"""This class represents a configuration scope in a single YAML file."""
|
||||
|
||||
def __init__(
|
||||
self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
|
||||
self,
|
||||
name: str,
|
||||
path: str,
|
||||
schema: YamlConfigDict,
|
||||
*,
|
||||
yaml_path: Optional[List[str]] = None,
|
||||
writable: bool = True,
|
||||
) -> None:
|
||||
"""Similar to ``ConfigScope`` but can be embedded in another schema.
|
||||
|
||||
@@ -195,15 +218,13 @@ def __init__(
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
"""
|
||||
super().__init__(name, path)
|
||||
super().__init__(name)
|
||||
self._raw_data: Optional[YamlConfigDict] = None
|
||||
self.schema = schema
|
||||
self.path = path
|
||||
self.writable = writable
|
||||
self.yaml_path = yaml_path or []
|
||||
|
||||
@property
|
||||
def is_platform_dependent(self) -> bool:
|
||||
return False
|
||||
|
||||
def get_section_filename(self, section) -> str:
|
||||
return self.path
|
||||
|
||||
@@ -257,6 +278,8 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
||||
return self.sections.get(section, None)
|
||||
|
||||
def _write_section(self, section: str) -> None:
|
||||
if not self.writable:
|
||||
raise ConfigError(f"Cannot write to immutable scope {self}")
|
||||
data_to_write: Optional[YamlConfigDict] = self._raw_data
|
||||
|
||||
# If there is no existing data, this section SingleFileScope has never
|
||||
@@ -301,19 +324,6 @@ def __repr__(self) -> str:
|
||||
return f"<SingleFileScope: {self.name}: {self.path}>"
|
||||
|
||||
|
||||
class ImmutableConfigScope(ConfigScope):
|
||||
"""A configuration scope that cannot be written to.
|
||||
|
||||
This is used for ConfigScopes passed on the command line.
|
||||
"""
|
||||
|
||||
def _write_section(self, section) -> None:
|
||||
raise ConfigError(f"Cannot write to immutable scope {self}")
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<ImmutableConfigScope: {self.name}: {self.path}>"
|
||||
|
||||
|
||||
class InternalConfigScope(ConfigScope):
|
||||
"""An internal configuration scope that is not persisted to a file.
|
||||
|
||||
@@ -323,7 +333,7 @@ class InternalConfigScope(ConfigScope):
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
|
||||
super().__init__(name, None)
|
||||
super().__init__(name)
|
||||
self.sections = syaml.syaml_dict()
|
||||
|
||||
if data is not None:
|
||||
@@ -333,9 +343,6 @@ def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
|
||||
validate({section: dsec}, SECTION_SCHEMAS[section])
|
||||
self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
|
||||
|
||||
def get_section_filename(self, section: str) -> str:
|
||||
raise NotImplementedError("Cannot get filename for InternalConfigScope.")
|
||||
|
||||
def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
||||
"""Just reads from an internal dictionary."""
|
||||
if section not in self.sections:
|
||||
@@ -440,27 +447,21 @@ def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
        return scope

    @property
    def file_scopes(self) -> List[ConfigScope]:
        """List of writable scopes with an associated file."""
        return [
            s
            for s in self.scopes.values()
            if (type(s) is ConfigScope or type(s) is SingleFileScope)
        ]

    def writable_scopes(self) -> Generator[ConfigScope, None, None]:
        """Generator of writable scopes with an associated file."""
        return (s for s in self.scopes.values() if s.writable)

    def highest_precedence_scope(self) -> ConfigScope:
        """Non-internal scope with highest precedence."""
        return next(reversed(self.file_scopes))
        """Writable scope with highest precedence."""
        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore

    def highest_precedence_non_platform_scope(self) -> ConfigScope:
        """Non-internal non-platform scope with highest precedence

        Platform-specific scopes are of the form scope/platform"""
        generator = reversed(self.file_scopes)
        highest = next(generator)
        while highest and highest.is_platform_dependent:
            highest = next(generator)
        return highest
        """Writable non-platform scope with highest precedence"""
        return next(
            s
            for s in reversed(self.scopes.values())  # type: ignore
            if s.writable and not s.is_platform_dependent
        )
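Scopes are stored lowest-to-highest precedence in an insertion-ordered dict, so the highest-precedence writable scope is simply the first writable one encountered when iterating in reverse. A small sketch of that lookup with made-up scope names; FakeScope is illustrative, not Spack's class:

# Sketch of the precedence lookup above, using a stand-in scope type.
from typing import NamedTuple


class FakeScope(NamedTuple):
    name: str
    writable: bool
    is_platform_dependent: bool


scopes = {
    "defaults": FakeScope("defaults", writable=True, is_platform_dependent=False),
    "system/linux": FakeScope("system/linux", writable=True, is_platform_dependent=True),
    "cmd_scope_0": FakeScope("cmd_scope_0", writable=False, is_platform_dependent=False),
}

highest = next(s for s in reversed(scopes.values()) if s.writable)
print(highest.name)  # system/linux -- the read-only command-line scope is skipped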
|
||||
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
|
||||
"""
|
||||
@@ -755,13 +756,14 @@ def override(


def _add_platform_scope(
    cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
    cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
) -> None:
    """Add a platform-specific subdirectory for the current platform."""
    platform = spack.platforms.host().name
    plat_name = os.path.join(name, platform)
    plat_path = os.path.join(path, platform)
    cfg.push_scope(scope_type(plat_name, plat_path))
    scope = DirectoryConfigScope(
        f"{name}/{platform}", os.path.join(path, platform), writable=writable
    )
    cfg.push_scope(scope)
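Each scope gains a platform-specific child named "scope/platform", which is also what is_platform_dependent keys on (the "/" in the name). A short sketch of the naming; "linux" stands in for spack.platforms.host().name:

# Sketch of the platform sub-scope naming used above.
import os


def platform_scope(name: str, path: str, platform: str = "linux") -> tuple:
    # the "/" in the scope name is what marks it as platform-dependent
    return f"{name}/{platform}", os.path.join(path, platform)


print(platform_scope("defaults", "/opt/spack/etc/spack/defaults"))
# ('defaults/linux', '/opt/spack/etc/spack/defaults/linux')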
|
||||
|
||||
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||
@@ -792,22 +794,27 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
def _add_command_line_scopes(
    cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
) -> None:
    """Add additional scopes from the --config-scope argument.
    """Add additional scopes from the --config-scope argument, either envs or dirs."""
    import spack.environment.environment as env  # circular import

    Command line scopes are named after their position in the arg list.
    """
    for i, path in enumerate(command_line_scopes):
        # We ensure that these scopes exist and are readable, as they are
        # provided on the command line by the user.
        if not os.path.isdir(path):
            raise ConfigError(f"config scope is not a directory: '{path}'")
        elif not os.access(path, os.R_OK):
            raise ConfigError(f"config scope is not readable: '{path}'")
        name = f"cmd_scope_{i}"

        # name based on order on the command line
        name = f"cmd_scope_{i:d}"
        cfg.push_scope(ImmutableConfigScope(name, path))
        _add_platform_scope(cfg, ImmutableConfigScope, name, path)
        if env.exists(path):  # managed environment
            manifest = env.EnvironmentManifestFile(env.root(path))
        elif env.is_env_dir(path):  # anonymous environment
            manifest = env.EnvironmentManifestFile(path)
        elif os.path.isdir(path):  # directory with config files
            cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
            _add_platform_scope(cfg, name, path, writable=False)
            continue
        else:
            raise ConfigError(f"Invalid configuration scope: {path}")

        for scope in manifest.env_config_scopes:
            scope.name = f"{name}:{scope.name}"
            scope.writable = False
            cfg.push_scope(scope)
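A --config-scope path may now name a managed environment, an anonymous environment directory, or a plain directory of config files; environment scopes get renamed after their command-line position and are made read-only. A hypothetical mini-version of that dispatch (FakeScope and the helper are stand-ins, not Spack APIs):

# Hypothetical mini-version of the command-line scope dispatch above.
import os
from dataclasses import dataclass


@dataclass
class FakeScope:
    name: str
    writable: bool = True


def scopes_for_path(i: int, path: str, env_scopes=None) -> list:
    name = f"cmd_scope_{i}"
    if env_scopes is not None:                   # environment (managed or anonymous)
        out = []
        for scope in env_scopes:
            scope.name = f"{name}:{scope.name}"  # prefix with the command-line position
            scope.writable = False               # command-line scopes are read-only
            out.append(scope)
        return out
    if os.path.isdir(path):                      # plain directory of config files
        return [FakeScope(name, writable=False)]
    raise ValueError(f"Invalid configuration scope: {path}")


print([s.name for s in scopes_for_path(0, ".", env_scopes=[FakeScope("env:demo")])])
# ['cmd_scope_0:env:demo']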
|
||||
|
||||
def create() -> Configuration:
|
||||
@@ -851,10 +858,10 @@ def create() -> Configuration:
|
||||
|
||||
# add each scope and its platform-specific directory
|
||||
for name, path in configuration_paths:
|
||||
cfg.push_scope(ConfigScope(name, path))
|
||||
cfg.push_scope(DirectoryConfigScope(name, path))
|
||||
|
||||
# Each scope can have per-platform overrides in subdirectories
|
||||
_add_platform_scope(cfg, ConfigScope, name, path)
|
||||
_add_platform_scope(cfg, name, path)
|
||||
|
||||
# add command-line scopes
|
||||
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
||||
@@ -969,7 +976,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
|
||||
def add_default_platform_scope(platform: str) -> None:
|
||||
plat_name = os.path.join("defaults", platform)
|
||||
plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
|
||||
CONFIG.push_scope(ConfigScope(plat_name, plat_path))
|
||||
CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
|
||||
|
||||
|
||||
def scopes() -> Dict[str, ConfigScope]:
|
||||
@@ -978,19 +985,10 @@ def scopes() -> Dict[str, ConfigScope]:


def writable_scopes() -> List[ConfigScope]:
    """
    Return list of writable scopes. Higher-priority scopes come first in the
    list.
    """
    return list(
        reversed(
            list(
                x
                for x in CONFIG.scopes.values()
                if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
            )
        )
    )
    """Return list of writable scopes. Higher-priority scopes come first in the list."""
    scopes = [x for x in CONFIG.scopes.values() if x.writable]
    scopes.reverse()
    return scopes
|
||||
|
||||
def writable_scope_names() -> List[str]:
|
||||
@@ -1080,11 +1078,8 @@ def validate(
|
||||
"""
|
||||
import jsonschema
|
||||
|
||||
# Validate a copy to avoid adding defaults
|
||||
# This allows us to round-trip data without adding to it.
|
||||
test_data = syaml.deepcopy(data)
|
||||
try:
|
||||
spack.schema.Validator(schema).validate(test_data)
|
||||
spack.schema.Validator(schema).validate(data)
|
||||
except jsonschema.ValidationError as e:
|
||||
if hasattr(e.instance, "lc"):
|
||||
line_number = e.instance.lc.line + 1
|
||||
@@ -1093,7 +1088,7 @@ def validate(
|
||||
raise ConfigFormatError(e, data, filename, line_number) from e
|
||||
# return the validated data so that we can access the raw data
|
||||
# mostly relevant for environments
|
||||
return test_data
|
||||
return data
|
||||
|
||||
|
||||
def read_config_file(
|
||||
@@ -1599,7 +1594,7 @@ def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuratio
|
||||
path = os.path.normpath(scope_or_path)
|
||||
assert os.path.isdir(path), f'"{path}" must be a directory'
|
||||
name = os.path.basename(path)
|
||||
scopes.append(ConfigScope(name, path))
|
||||
scopes.append(DirectoryConfigScope(name, path))
|
||||
|
||||
configuration = Configuration(*scopes)
|
||||
return configuration
|
||||
|
||||
@@ -78,24 +78,17 @@
|
||||
"image": "quay.io/almalinuxorg/almalinux:8"
|
||||
}
|
||||
},
|
||||
"centos:stream": {
|
||||
"centos:stream9": {
|
||||
"bootstrap": {
|
||||
"template": "container/centos_stream.dockerfile",
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
"template": "container/centos_stream9.dockerfile",
|
||||
"image": "quay.io/centos/centos:stream9"
|
||||
},
|
||||
"os_package_manager": "dnf_epel",
|
||||
"build": "spack/centos-stream",
|
||||
"build": "spack/centos-stream9",
|
||||
"final": {
|
||||
"image": "quay.io/centos/centos:stream"
|
||||
"image": "quay.io/centos/centos:stream9"
|
||||
}
|
||||
},
|
||||
"centos:7": {
|
||||
"bootstrap": {
|
||||
"template": "container/centos_7.dockerfile"
|
||||
},
|
||||
"os_package_manager": "yum",
|
||||
"build": "spack/centos7"
|
||||
},
|
||||
"opensuse/leap:15": {
|
||||
"bootstrap": {
|
||||
"template": "container/leap-15.dockerfile"
|
||||
|
||||
@@ -2,7 +2,12 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from .common import DetectedPackage, executable_prefix, update_configuration
|
||||
from .common import (
|
||||
DetectedPackage,
|
||||
executable_prefix,
|
||||
set_virtuals_nonbuildable,
|
||||
update_configuration,
|
||||
)
|
||||
from .path import by_path, executables_in_path
|
||||
from .test import detection_tests
|
||||
|
||||
@@ -12,5 +17,6 @@
|
||||
"executables_in_path",
|
||||
"executable_prefix",
|
||||
"update_configuration",
|
||||
"set_virtuals_nonbuildable",
|
||||
"detection_tests",
|
||||
]
|
||||
|
||||
@@ -136,10 +136,10 @@ def path_to_dict(search_paths: List[str]):
    # entry overrides later entries
    for search_path in reversed(search_paths):
        try:
            for lib in os.listdir(search_path):
                lib_path = os.path.join(search_path, lib)
                if llnl.util.filesystem.is_readable_file(lib_path):
                    path_to_lib[lib_path] = lib
            with os.scandir(search_path) as entries:
                path_to_lib.update(
                    {entry.path: entry.name for entry in entries if entry.is_file()}
                )
        except OSError as e:
            msg = f"cannot scan '{search_path}' for external software: {str(e)}"
            llnl.util.tty.debug(msg)
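os.scandir keeps file-type information from the directory listing itself, so the per-entry stat that os.listdir plus is_readable_file needed goes away. A standalone sketch of the new pattern, outside Spack:

# Standalone version of the scandir pattern above: map absolute path -> file name,
# skipping anything that is not a regular file.
import os


def files_in(search_path: str) -> dict:
    path_to_lib: dict = {}
    try:
        with os.scandir(search_path) as entries:
            path_to_lib.update({entry.path: entry.name for entry in entries if entry.is_file()})
    except OSError as e:
        print(f"cannot scan '{search_path}': {e}")
    return path_to_lib


print(list(files_in(".").values())[:3])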
@@ -252,6 +252,27 @@ def update_configuration(
|
||||
return all_new_specs
|
||||
|
||||
|
||||
def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -> List[str]:
    """Update packages:virtual:buildable:False for the provided virtual packages, if the property
    is not set by the user. Returns the list of virtual packages that have been updated."""
    packages = spack.config.get("packages")
    new_config = {}
    for virtual in virtuals:
        # If the user has set the buildable prop do not override it
        if virtual in packages and "buildable" in packages[virtual]:
            continue
        new_config[virtual] = {"buildable": False}

    # Update the provided scope
    spack.config.set(
        "packages",
        spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
        scope=scope,
    )

    return list(new_config.keys())
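The helper only touches virtuals the user has not already configured. A small, self-contained illustration of that merge, with plain dicts standing in for Spack's YAML config and merge_yaml:

# Plain-dict illustration of set_virtuals_nonbuildable's behavior.
from typing import Dict, List, Set


def mark_nonbuildable(packages: Dict[str, dict], virtuals: Set[str]) -> List[str]:
    updated = []
    for virtual in sorted(virtuals):
        # respect an explicit user setting, whatever its value
        if virtual in packages and "buildable" in packages[virtual]:
            continue
        packages.setdefault(virtual, {})["buildable"] = False
        updated.append(virtual)
    return updated


cfg = {"mpi": {"buildable": True}}
print(mark_nonbuildable(cfg, {"mpi", "glx"}))  # ['glx'] -- 'mpi' was set by the user
print(cfg)  # {'mpi': {'buildable': True}, 'glx': {'buildable': False}}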
|
||||
|
||||
def _windows_drive() -> str:
|
||||
"""Return Windows drive string extracted from the PROGRAMFILES environment variable,
|
||||
which is guaranteed to be defined for all logins.
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type
|
||||
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.lang
|
||||
@@ -187,7 +187,7 @@ def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[s
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
|
||||
def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
|
||||
groups = collections.defaultdict(set)
|
||||
for p in paths:
|
||||
groups[os.path.dirname(p)].add(p)
|
||||
@@ -200,7 +200,7 @@ class Finder:
|
||||
def default_path_hints(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
"""Returns the list of patterns used to match candidate files.
|
||||
|
||||
Args:
|
||||
@@ -226,7 +226,7 @@ def prefix_from_path(self, *, path: str) -> str:
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def detect_specs(
|
||||
self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
|
||||
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
|
||||
) -> List[DetectedPackage]:
|
||||
"""Given a list of files matching the search patterns, returns a list of detected specs.
|
||||
|
||||
@@ -243,7 +243,9 @@ def detect_specs(
|
||||
return []
|
||||
|
||||
result = []
|
||||
for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
|
||||
for candidate_path, items_in_prefix in _group_by_prefix(
|
||||
llnl.util.lang.dedupe(paths)
|
||||
).items():
|
||||
# TODO: multiple instances of a package can live in the same
|
||||
# prefix, and a package implementation can return multiple specs
|
||||
# for one prefix, but without additional details (e.g. about the
|
||||
@@ -299,19 +301,17 @@ def detect_specs(
|
||||
return result
|
||||
|
||||
def find(
|
||||
self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
|
||||
self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
|
||||
) -> List[DetectedPackage]:
|
||||
"""For a given package, returns a list of detected specs.
|
||||
|
||||
Args:
|
||||
pkg_name: package being detected
|
||||
initial_guess: initial list of paths to search from the caller
|
||||
if None, default paths are searched. If this
|
||||
is an empty list, nothing will be searched.
|
||||
repository: repository to retrieve the package
|
||||
initial_guess: initial list of paths to search from the caller if None, default paths
|
||||
are searched. If this is an empty list, nothing will be searched.
|
||||
"""
|
||||
import spack.repo
|
||||
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
pkg_cls = repository.get_pkg_class(pkg_name)
|
||||
patterns = self.search_patterns(pkg=pkg_cls)
|
||||
if not patterns:
|
||||
return []
|
||||
@@ -327,7 +327,7 @@ class ExecutablesFinder(Finder):
|
||||
def default_path_hints(self) -> List[str]:
|
||||
return spack.util.environment.get_path("PATH")
|
||||
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
|
||||
result = pkg.platform_executables()
|
||||
@@ -335,13 +335,10 @@ def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]
|
||||
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
executables_by_path = executables_in_path(path_hints=paths)
|
||||
patterns = [re.compile(x) for x in patterns]
|
||||
result = []
|
||||
for compiled_re in patterns:
|
||||
for path, exe in executables_by_path.items():
|
||||
if compiled_re.search(exe):
|
||||
result.append(path)
|
||||
return list(sorted(set(result)))
|
||||
joined_pattern = re.compile(r"|".join(patterns))
|
||||
result = [path for path, exe in executables_by_path.items() if joined_pattern.search(exe)]
|
||||
result.sort()
|
||||
return result
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
result = executable_prefix(path)
|
||||
@@ -356,7 +353,7 @@ class LibrariesFinder(Finder):
|
||||
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
|
||||
"""
|
||||
|
||||
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "libraries"):
|
||||
result = pkg.libraries
|
||||
@@ -385,7 +382,7 @@ def prefix_from_path(self, *, path: str) -> str:
|
||||
|
||||
|
||||
def by_path(
|
||||
packages_to_search: List[str],
|
||||
packages_to_search: Iterable[str],
|
||||
*,
|
||||
path_hints: Optional[List[str]] = None,
|
||||
max_workers: Optional[int] = None,
|
||||
@@ -399,19 +396,28 @@ def by_path(
|
||||
path_hints: initial list of paths to be searched
|
||||
max_workers: maximum number of workers to search for packages in parallel
|
||||
"""
|
||||
import spack.repo
|
||||
|
||||
# TODO: Packages should be able to define both .libraries and .executables in the future
|
||||
# TODO: determine_spec_details should get all relevant libraries and executables in one call
|
||||
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
|
||||
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
|
||||
|
||||
result = collections.defaultdict(list)
|
||||
repository = spack.repo.PATH.ensure_unwrapped()
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
|
||||
for pkg in packages_to_search:
|
||||
executable_future = executor.submit(
|
||||
executables_finder.find, pkg_name=pkg, initial_guess=path_hints
|
||||
executables_finder.find,
|
||||
pkg_name=pkg,
|
||||
initial_guess=path_hints,
|
||||
repository=repository,
|
||||
)
|
||||
library_future = executor.submit(
|
||||
libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
|
||||
libraries_finder.find,
|
||||
pkg_name=pkg,
|
||||
initial_guess=path_hints,
|
||||
repository=repository,
|
||||
)
|
||||
detected_specs_by_package[pkg] = executable_future, library_future
|
||||
|
||||
|
||||
@@ -81,7 +81,17 @@ class OpenMpi(Package):
|
||||
]
|
||||
|
||||
#: These are variant names used by Spack internally; packages can't use them
reserved_names = ["patches", "dev_path"]
reserved_names = [
    "arch",
    "architecture",
    "dev_path",
    "namespace",
    "operating_system",
    "os",
    "patches",
    "platform",
    "target",
]
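With the longer reserved list, a package-defined variant named like one of the spec's architecture fields can be rejected up front. A hedged sketch of such a check; the error type and helper are illustrative:

# Illustrative check against the reserved variant names listed above.
RESERVED_NAMES = {
    "arch", "architecture", "dev_path", "namespace",
    "operating_system", "os", "patches", "platform", "target",
}


def check_variant_name(pkg_name: str, variant: str) -> None:
    if variant in RESERVED_NAMES:
        raise ValueError(f"cannot define variant '{variant}' in package '{pkg_name}': reserved name")


check_variant_name("openmpi", "fabrics")      # fine
try:
    check_variant_name("openmpi", "target")   # clashes with the spec's target field
except ValueError as e:
    print(e)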
|
||||
#: Names of possible directives. This list is mostly populated using the @directive decorator.
|
||||
#: Some directives leverage others and in that case are not automatically added.
|
||||
@@ -90,14 +100,14 @@ class OpenMpi(Package):
|
||||
_patch_order_index = 0
|
||||
|
||||
|
||||
SpecType = Union["spack.spec.Spec", str]
|
||||
SpecType = str
|
||||
DepType = Union[Tuple[str, ...], str]
|
||||
WhenType = Optional[Union["spack.spec.Spec", str, bool]]
|
||||
Patcher = Callable[[Union["spack.package_base.PackageBase", Dependency]], None]
|
||||
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]
|
||||
|
||||
|
||||
SUPPORTED_LANGUAGES = ("fortran", "cxx")
|
||||
SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
|
||||
|
||||
|
||||
def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
|
||||
@@ -475,7 +485,7 @@ def _execute_version(pkg, ver, **kwargs):
|
||||
|
||||
def _depends_on(
|
||||
pkg: "spack.package_base.PackageBase",
|
||||
spec: SpecType,
|
||||
spec: "spack.spec.Spec",
|
||||
*,
|
||||
when: WhenType = None,
|
||||
type: DepType = dt.DEFAULT_TYPES,
|
||||
@@ -485,11 +495,10 @@ def _depends_on(
|
||||
if not when_spec:
|
||||
return
|
||||
|
||||
dep_spec = spack.spec.Spec(spec)
|
||||
if not dep_spec.name:
|
||||
raise DependencyError("Invalid dependency specification in package '%s':" % pkg.name, spec)
|
||||
if pkg.name == dep_spec.name:
|
||||
raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)
|
||||
if not spec.name:
|
||||
raise DependencyError(f"Invalid dependency specification in package '{pkg.name}':", spec)
|
||||
if pkg.name == spec.name:
|
||||
raise CircularReferenceError(f"Package '{pkg.name}' cannot depend on itself.")
|
||||
|
||||
depflag = dt.canonicalize(type)
|
||||
|
||||
@@ -505,7 +514,7 @@ def _depends_on(
|
||||
# ensure `Spec.virtual` is a valid thing to call in a directive.
|
||||
# For now, we comment out the following check to allow for virtual packages
|
||||
# with package files.
|
||||
# if patches and dep_spec.virtual:
|
||||
# if patches and spec.virtual:
|
||||
# raise DependencyPatchError("Cannot patch a virtual dependency.")
|
||||
|
||||
# ensure patches is a list
|
||||
@@ -520,13 +529,13 @@ def _depends_on(
|
||||
|
||||
# this is where we actually add the dependency to this package
|
||||
deps_by_name = pkg.dependencies.setdefault(when_spec, {})
|
||||
dependency = deps_by_name.get(dep_spec.name)
|
||||
dependency = deps_by_name.get(spec.name)
|
||||
|
||||
if not dependency:
|
||||
dependency = Dependency(pkg, dep_spec, depflag=depflag)
|
||||
deps_by_name[dep_spec.name] = dependency
|
||||
dependency = Dependency(pkg, spec, depflag=depflag)
|
||||
deps_by_name[spec.name] = dependency
|
||||
else:
|
||||
dependency.spec.constrain(dep_spec, deps=False)
|
||||
dependency.spec.constrain(spec, deps=False)
|
||||
dependency.depflag |= depflag
|
||||
|
||||
# apply patches to the dependency
|
||||
@@ -591,12 +600,13 @@ def depends_on(
|
||||
@see The section "Dependency specs" in the Spack Packaging Guide.
|
||||
|
||||
"""
|
||||
if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
|
||||
dep_spec = spack.spec.Spec(spec)
|
||||
if dep_spec.name in SUPPORTED_LANGUAGES:
|
||||
assert type == "build", "languages must be of 'build' type"
|
||||
return _language(lang_spec_str=spec, when=when)
|
||||
|
||||
def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
|
||||
_depends_on(pkg, spec, when=when, type=type, patches=patches)
|
||||
_depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
|
||||
|
||||
return _execute_depends_on
|
||||
|
||||
@@ -666,25 +676,24 @@ def extends(spec, when=None, type=("build", "run"), patches=None):
|
||||
|
||||
keyword arguments can be passed to extends() so that extension
|
||||
packages can pass parameters to the extendee's extension
|
||||
mechanism.
|
||||
|
||||
"""
|
||||
mechanism."""
|
||||
|
||||
def _execute_extends(pkg):
|
||||
when_spec = _make_when_spec(when)
|
||||
if not when_spec:
|
||||
return
|
||||
|
||||
_depends_on(pkg, spec, when=when, type=type, patches=patches)
|
||||
spec_obj = spack.spec.Spec(spec)
|
||||
dep_spec = spack.spec.Spec(spec)
|
||||
|
||||
_depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
|
||||
|
||||
# When extending python, also add a dependency on python-venv. This is done so that
|
||||
# Spack environment views are Python virtual environments.
|
||||
if spec_obj.name == "python" and not pkg.name == "python-venv":
|
||||
_depends_on(pkg, "python-venv", when=when, type=("build", "run"))
|
||||
if dep_spec.name == "python" and not pkg.name == "python-venv":
|
||||
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
|
||||
|
||||
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
|
||||
pkg.extendees[spec_obj.name] = (spec_obj, None)
|
||||
pkg.extendees[dep_spec.name] = (dep_spec, None)
|
||||
|
||||
return _execute_extends
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
import collections
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import copy
|
||||
import errno
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
@@ -24,6 +24,7 @@
|
||||
from llnl.util.link_tree import ConflictingSpecsError
|
||||
from llnl.util.symlink import readlink, symlink
|
||||
|
||||
import spack.caches
|
||||
import spack.cmd
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -268,9 +269,7 @@ def root(name):
|
||||
|
||||
def exists(name):
|
||||
"""Whether an environment with this name exists or not."""
|
||||
if not valid_env_name(name):
|
||||
return False
|
||||
return os.path.isdir(root(name))
|
||||
return valid_env_name(name) and os.path.isdir(_root(name))
|
||||
|
||||
|
||||
def active(name):
|
||||
@@ -529,8 +528,8 @@ def _read_yaml(str_or_file):
|
||||
)
|
||||
|
||||
filename = getattr(str_or_file, "name", None)
|
||||
default_data = spack.config.validate(data, spack.schema.env.schema, filename)
|
||||
return data, default_data
|
||||
spack.config.validate(data, spack.schema.env.schema, filename)
|
||||
return data
|
||||
|
||||
|
||||
def _write_yaml(data, str_or_file):
|
||||
@@ -790,6 +789,23 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
        root_dirname = os.path.dirname(self.root)
        tmp_symlink_name = os.path.join(root_dirname, "._view_link")

        # Remove self.root if it is an empty dir, since we need a symlink there. Note that rmdir
        # fails if self.root is a symlink.
        try:
            os.rmdir(self.root)
        except (FileNotFoundError, NotADirectoryError):
            pass
        except OSError as e:
            if e.errno == errno.ENOTEMPTY:
                msg = "it is a non-empty directory"
            elif e.errno == errno.EACCES:
                msg = "of insufficient permissions"
            else:
                raise
            raise SpackEnvironmentViewError(
                f"The environment view in {self.root} cannot be created because {msg}."
            ) from e
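The errno-specific handling turns the two expected failure modes into a friendly error and re-raises anything else. A standalone sketch of that pattern, with a generic error type in place of SpackEnvironmentViewError:

# Standalone sketch of the errno-based error mapping used when clearing the view root.
import errno
import os


def remove_empty_dir(path: str) -> None:
    try:
        os.rmdir(path)
    except (FileNotFoundError, NotADirectoryError):
        pass  # nothing to do: missing, or already a symlink
    except OSError as e:
        if e.errno == errno.ENOTEMPTY:
            reason = "it is a non-empty directory"
        elif e.errno == errno.EACCES:
            reason = "of insufficient permissions"
        else:
            raise
        raise RuntimeError(f"cannot replace {path} because {reason}") from e


remove_empty_dir("nonexistent-dir")  # silently ignored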
|
||||
# Create a new view
|
||||
try:
|
||||
fs.mkdirp(new_root)
|
||||
@@ -921,7 +937,7 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
|
||||
def _load_manifest_file(self):
|
||||
"""Instantiate and load the manifest file contents into memory."""
|
||||
with lk.ReadTransaction(self.txlock):
|
||||
self.manifest = EnvironmentManifestFile(self.path)
|
||||
self.manifest = EnvironmentManifestFile(self.path, self.name)
|
||||
with self.manifest.use_config():
|
||||
self._read()
|
||||
|
||||
@@ -958,18 +974,25 @@ def write_transaction(self):
|
||||
"""Get a write lock context manager for use in a `with` block."""
|
||||
return lk.WriteTransaction(self.txlock, acquire=self._re_read)
|
||||
|
||||
    def _process_definition(self, item):
    def _process_definition(self, entry):
        """Process a single spec definition item."""
        entry = copy.deepcopy(item)
        when = _eval_conditional(entry.pop("when", "True"))
        assert len(entry) == 1
        when_string = entry.get("when")
        if when_string is not None:
            when = _eval_conditional(when_string)
            assert len([x for x in entry if x != "when"]) == 1
        else:
            when = True
            assert len(entry) == 1

        if when:
            name, spec_list = next(iter(entry.items()))
            user_specs = SpecList(name, spec_list, self.spec_lists.copy())
            if name in self.spec_lists:
                self.spec_lists[name].extend(user_specs)
            else:
                self.spec_lists[name] = user_specs
            for name, spec_list in entry.items():
                if name == "when":
                    continue
                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
                if name in self.spec_lists:
                    self.spec_lists[name].extend(user_specs)
                else:
                    self.spec_lists[name] = user_specs
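The rewrite avoids deep-copying each definition entry and instead skips the "when" key while iterating over the names it defines. A small illustration of the data shape it handles, with hypothetical definitions and a trivial stand-in for _eval_conditional:

# Hypothetical definitions entries as they appear in spack.yaml, processed the new way.
definitions = [
    {"compilers": ["gcc@12", "clang@16"]},
    {"mpis": ["openmpi", "mpich"], "when": "platform == 'linux'"},
]

spec_lists: dict = {}
for entry in definitions:
    when = entry.get("when")
    # eval() here is only a stand-in for Spack's _eval_conditional
    active = True if when is None else eval(when, {}, {"platform": "linux"})
    if not active:
        continue
    for name, specs in entry.items():
        if name == "when":
            continue
        spec_lists.setdefault(name, []).extend(specs)

print(spec_lists)  # {'compilers': ['gcc@12', 'clang@16'], 'mpis': ['openmpi', 'mpich']}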
|
||||
def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
|
||||
"""Process view option(s), which can be boolean, string, or None.
|
||||
@@ -1191,7 +1214,6 @@ def scope_name(self):
|
||||
def include_concrete_envs(self):
|
||||
"""Copy and save the included envs' specs internally"""
|
||||
|
||||
lockfile_meta = None
|
||||
root_hash_seen = set()
|
||||
concrete_hash_seen = set()
|
||||
self.included_concrete_spec_data = {}
|
||||
@@ -1202,37 +1224,26 @@ def include_concrete_envs(self):
|
||||
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
|
||||
|
||||
env = Environment(env_path)
|
||||
|
||||
with open(env.lock_path) as f:
|
||||
lockfile_as_dict = env._read_lockfile(f)
|
||||
|
||||
# Lockfile_meta must match each env and use at least format version 5
|
||||
if lockfile_meta is None:
|
||||
lockfile_meta = lockfile_as_dict["_meta"]
|
||||
elif lockfile_meta != lockfile_as_dict["_meta"]:
|
||||
raise SpackEnvironmentError("All lockfile _meta values must match")
|
||||
elif lockfile_meta["lockfile-version"] < 5:
|
||||
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
|
||||
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
|
||||
|
||||
# Copy unique root specs from env
|
||||
self.included_concrete_spec_data[env_path] = {"roots": []}
|
||||
for root_dict in lockfile_as_dict["roots"]:
|
||||
for root_dict in env._concrete_roots_dict():
|
||||
if root_dict["hash"] not in root_hash_seen:
|
||||
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
|
||||
root_hash_seen.add(root_dict["hash"])
|
||||
|
||||
# Copy unique concrete specs from env
|
||||
for concrete_spec in lockfile_as_dict["concrete_specs"]:
|
||||
if concrete_spec not in concrete_hash_seen:
|
||||
self.included_concrete_spec_data[env_path].update(
|
||||
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
|
||||
for dag_hash, spec_details in env._concrete_specs_dict().items():
|
||||
if dag_hash not in concrete_hash_seen:
|
||||
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
|
||||
{dag_hash: spec_details}
|
||||
)
|
||||
concrete_hash_seen.add(concrete_spec)
|
||||
concrete_hash_seen.add(dag_hash)
|
||||
|
||||
if "include_concrete" in lockfile_as_dict.keys():
|
||||
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
|
||||
"include_concrete"
|
||||
]
|
||||
# Copy transitive include data
|
||||
transitive = env.included_concrete_spec_data
|
||||
if transitive:
|
||||
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
|
||||
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
self.write()
|
||||
@@ -1621,9 +1632,8 @@ def _concretize_separately(self, tests=False):
|
||||
i += 1
|
||||
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
if spack.config.get("config:concretizer", "clingo") == "clingo":
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
@@ -2151,8 +2161,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
|
||||
|
||||
return specs
|
||||
|
||||
def _to_lockfile_dict(self):
|
||||
"""Create a dictionary to store a lockfile for this environment."""
|
||||
def _concrete_specs_dict(self):
|
||||
concrete_specs = {}
|
||||
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
|
||||
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
|
||||
@@ -2160,7 +2169,22 @@ def _to_lockfile_dict(self):
|
||||
spec_dict[ht.dag_hash.name] = s.dag_hash()
|
||||
concrete_specs[s.dag_hash()] = spec_dict
|
||||
|
||||
if s.build_spec is not s:
|
||||
for d in s.build_spec.traverse():
|
||||
build_spec_dict = d.node_dict_with_hashes(hash=ht.dag_hash)
|
||||
build_spec_dict[ht.dag_hash.name] = d.dag_hash()
|
||||
concrete_specs[d.dag_hash()] = build_spec_dict
|
||||
|
||||
return concrete_specs
|
||||
|
||||
def _concrete_roots_dict(self):
|
||||
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
|
||||
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
|
||||
|
||||
def _to_lockfile_dict(self):
|
||||
"""Create a dictionary to store a lockfile for this environment."""
|
||||
concrete_specs = self._concrete_specs_dict()
|
||||
root_specs = self._concrete_roots_dict()
|
||||
|
||||
spack_dict = {"version": spack.spack_version}
|
||||
spack_commit = spack.main.get_spack_commit()
|
||||
@@ -2181,7 +2205,7 @@ def _to_lockfile_dict(self):
|
||||
# spack version information
|
||||
"spack": spack_dict,
|
||||
# users specs + hashes are the 'roots' of the environment
|
||||
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
|
||||
"roots": root_specs,
|
||||
# Concrete specs by hash, including dependencies
|
||||
"concrete_specs": concrete_specs,
|
||||
}
|
||||
@@ -2310,7 +2334,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
|
||||
specs_by_hash[lockfile_key] = spec
|
||||
|
||||
# Second pass: For each spec, get its dependencies from the node dict
|
||||
# and add them to the spec
|
||||
# and add them to the spec, including build specs
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
name, data = reader.name_and_data(node_dict)
|
||||
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
|
||||
@@ -2318,6 +2342,10 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
|
||||
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
|
||||
)
|
||||
|
||||
if "build_spec" in node_dict:
|
||||
_, bhash, _ = reader.build_spec_from_node_dict(node_dict)
|
||||
specs_by_hash[lockfile_key]._build_spec = specs_by_hash[bhash]
|
||||
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
# The first time we see each DAG hash, that's the one we want to
|
||||
# keep. This is only required as long as we support older lockfile
|
||||
@@ -2542,7 +2570,7 @@ def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
|
||||
|
||||
def make_repo_path(root):
|
||||
"""Make a RepoPath from the repo subdirectories in an environment."""
|
||||
path = spack.repo.RepoPath()
|
||||
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
|
||||
|
||||
if os.path.isdir(root):
|
||||
for repo_root in os.listdir(root):
|
||||
@@ -2551,7 +2579,7 @@ def make_repo_path(root):
|
||||
if not os.path.isdir(repo_root):
|
||||
continue
|
||||
|
||||
repo = spack.repo.Repo(repo_root)
|
||||
repo = spack.repo.from_path(repo_root)
|
||||
path.put_last(repo)
|
||||
|
||||
return path
|
||||
@@ -2752,10 +2780,11 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
|
||||
manifest.flush()
|
||||
return manifest
|
||||
|
||||
def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
|
||||
def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] = None) -> None:
|
||||
self.manifest_dir = pathlib.Path(manifest_dir)
|
||||
self.name = name or str(manifest_dir)
|
||||
self.manifest_file = self.manifest_dir / manifest_name
|
||||
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
|
||||
self.scope_name = f"env:{self.name}"
|
||||
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
|
||||
|
||||
#: Configuration scopes associated with this environment. Note that these are not
|
||||
@@ -2767,12 +2796,8 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
|
||||
raise SpackEnvironmentError(msg)
|
||||
|
||||
with self.manifest_file.open() as f:
|
||||
raw, with_defaults_added = _read_yaml(f)
|
||||
self.yaml_content = _read_yaml(f)
|
||||
|
||||
#: Pristine YAML content, without defaults being added
|
||||
self.pristine_yaml_content = raw
|
||||
#: YAML content with defaults added by Spack, if they're missing
|
||||
self.yaml_content = with_defaults_added
|
||||
self.changed = False
|
||||
|
||||
def _all_matches(self, user_spec: str) -> List[str]:
|
||||
@@ -2786,7 +2811,7 @@ def _all_matches(self, user_spec: str) -> List[str]:
|
||||
ValueError: if no equivalent match is found
|
||||
"""
|
||||
result = []
|
||||
for yaml_spec_str in self.pristine_configuration["specs"]:
|
||||
for yaml_spec_str in self.configuration["specs"]:
|
||||
if Spec(yaml_spec_str) == Spec(user_spec):
|
||||
result.append(yaml_spec_str)
|
||||
|
||||
@@ -2801,7 +2826,6 @@ def add_user_spec(self, user_spec: str) -> None:
|
||||
Args:
|
||||
user_spec: user spec to be appended
|
||||
"""
|
||||
self.pristine_configuration.setdefault("specs", []).append(user_spec)
|
||||
self.configuration.setdefault("specs", []).append(user_spec)
|
||||
self.changed = True
|
||||
|
||||
@@ -2816,7 +2840,6 @@ def remove_user_spec(self, user_spec: str) -> None:
|
||||
"""
|
||||
try:
|
||||
for key in self._all_matches(user_spec):
|
||||
self.pristine_configuration["specs"].remove(key)
|
||||
self.configuration["specs"].remove(key)
|
||||
except ValueError as e:
|
||||
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
|
||||
@@ -2834,7 +2857,6 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
|
||||
SpackEnvironmentError: when the user spec cannot be overridden
|
||||
"""
|
||||
try:
|
||||
self.pristine_configuration["specs"][idx] = user_spec
|
||||
self.configuration["specs"][idx] = user_spec
|
||||
except ValueError as e:
|
||||
msg = f"cannot override {user_spec} from {self}"
|
||||
@@ -2847,10 +2869,10 @@ def set_include_concrete(self, include_concrete: List[str]) -> None:
|
||||
Args:
|
||||
include_concrete: list of already existing concrete environments to include
|
||||
"""
|
||||
self.pristine_configuration[included_concrete_name] = []
|
||||
self.configuration[included_concrete_name] = []
|
||||
|
||||
for env_path in include_concrete:
|
||||
self.pristine_configuration[included_concrete_name].append(env_path)
|
||||
self.configuration[included_concrete_name].append(env_path)
|
||||
|
||||
self.changed = True
|
||||
|
||||
@@ -2864,14 +2886,13 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
|
||||
Raises:
|
||||
SpackEnvironmentError: if no valid definition exists already
|
||||
"""
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
defs = self.configuration.get("definitions", [])
|
||||
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"
|
||||
|
||||
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
|
||||
item[list_name].append(user_spec)
|
||||
break
|
||||
|
||||
self.configuration["definitions"][idx][list_name].append(user_spec)
|
||||
self.changed = True
|
||||
|
||||
def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
@@ -2885,7 +2906,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
SpackEnvironmentError: if the user spec cannot be removed from the list,
|
||||
or the list does not exist
|
||||
"""
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
defs = self.configuration.get("definitions", [])
|
||||
msg = (
|
||||
f"cannot remove {user_spec} from the '{list_name}' definition, "
|
||||
f"no valid list exists"
|
||||
@@ -2898,7 +2919,6 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self.configuration["definitions"][idx][list_name].remove(user_spec)
|
||||
self.changed = True
|
||||
|
||||
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
|
||||
@@ -2913,7 +2933,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
|
||||
Raises:
|
||||
SpackEnvironmentError: if the user spec cannot be overridden
|
||||
"""
|
||||
defs = self.pristine_configuration.get("definitions", [])
|
||||
defs = self.configuration.get("definitions", [])
|
||||
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"
|
||||
|
||||
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
|
||||
@@ -2924,7 +2944,6 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self.configuration["definitions"][idx][list_name][sub_index] = override
|
||||
self.changed = True
|
||||
|
||||
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
|
||||
@@ -2956,7 +2975,6 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
|
||||
True the default view is used for the environment, if False there's no view.
|
||||
"""
|
||||
if isinstance(view, dict):
|
||||
self.pristine_configuration["view"][default_view_name].update(view)
|
||||
self.configuration["view"][default_view_name].update(view)
|
||||
self.changed = True
|
||||
return
|
||||
@@ -2964,15 +2982,13 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
|
||||
if not isinstance(view, bool):
|
||||
view = str(view)
|
||||
|
||||
self.pristine_configuration["view"] = view
|
||||
self.configuration["view"] = view
|
||||
self.changed = True
|
||||
|
||||
def remove_default_view(self) -> None:
|
||||
"""Removes the default view from the manifest file"""
|
||||
view_data = self.pristine_configuration.get("view")
|
||||
view_data = self.configuration.get("view")
|
||||
if isinstance(view_data, collections.abc.Mapping):
|
||||
self.pristine_configuration["view"].pop(default_view_name)
|
||||
self.configuration["view"].pop(default_view_name)
|
||||
self.changed = True
|
||||
return
|
||||
@@ -2985,17 +3001,12 @@ def flush(self) -> None:
|
||||
return
|
||||
|
||||
with fs.write_tmp_and_move(os.path.realpath(self.manifest_file)) as f:
|
||||
_write_yaml(self.pristine_yaml_content, f)
|
||||
_write_yaml(self.yaml_content, f)
|
||||
self.changed = False
|
||||
|
||||
@property
|
||||
def pristine_configuration(self):
|
||||
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
|
||||
return self.pristine_yaml_content[TOP_LEVEL_KEY]
|
||||
|
||||
@property
|
||||
def configuration(self):
|
||||
"""Return the dictionaries in the YAML, without the top level attribute"""
|
||||
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
|
||||
return self.yaml_content[TOP_LEVEL_KEY]
|
||||
|
||||
def __len__(self):
|
||||
@@ -3027,12 +3038,11 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
|
||||
SpackEnvironmentError: if the manifest includes a remote file but
|
||||
no configuration stage directory has been identified
|
||||
"""
|
||||
scopes = []
|
||||
scopes: List[spack.config.ConfigScope] = []
|
||||
|
||||
# load config scopes added via 'include:', in reverse so that
|
||||
# highest-precedence scopes are last.
|
||||
includes = self[TOP_LEVEL_KEY].get("include", [])
|
||||
env_name = environment_name(self.manifest_dir)
|
||||
missing = []
|
||||
for i, config_path in enumerate(reversed(includes)):
|
||||
# allow paths to contain spack config/environment variables, etc.
|
||||
@@ -3095,24 +3105,22 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
|
||||
|
||||
if os.path.isdir(config_path):
|
||||
# directories are treated as regular ConfigScopes
|
||||
config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
|
||||
tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
|
||||
scope = spack.config.ConfigScope(config_name, config_path)
|
||||
config_name = f"env:{self.name}:{os.path.basename(config_path)}"
|
||||
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
|
||||
scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
|
||||
elif os.path.exists(config_path):
|
||||
# files are assumed to be SingleFileScopes
|
||||
config_name = "env:%s:%s" % (env_name, config_path)
|
||||
tty.debug(
|
||||
"Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
|
||||
)
|
||||
scope = spack.config.SingleFileScope(
|
||||
config_name, config_path, spack.schema.merged.schema
|
||||
config_name = f"env:{self.name}:{config_path}"
|
||||
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
|
||||
scopes.append(
|
||||
spack.config.SingleFileScope(
|
||||
config_name, config_path, spack.schema.merged.schema
|
||||
)
|
||||
)
|
||||
else:
|
||||
missing.append(config_path)
|
||||
continue
|
||||
|
||||
scopes.append(scope)
|
||||
|
||||
if missing:
|
||||
msg = "Detected {0} missing include path(s):".format(len(missing))
|
||||
msg += "\n {0}".format("\n ".join(missing))
|
||||
@@ -3129,7 +3137,10 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
|
||||
scopes: List[spack.config.ConfigScope] = [
|
||||
*self.included_config_scopes,
|
||||
spack.config.SingleFileScope(
|
||||
self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
|
||||
self.scope_name,
|
||||
str(self.manifest_file),
|
||||
spack.schema.env.schema,
|
||||
yaml_path=[TOP_LEVEL_KEY],
|
||||
),
|
||||
]
|
||||
ensure_no_disallowed_env_config_mods(scopes)
|
||||
|
||||
@@ -30,6 +30,7 @@
|
||||
import shutil
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import PurePath
|
||||
from typing import List, Optional
|
||||
|
||||
@@ -53,7 +54,7 @@
|
||||
import spack.version
|
||||
import spack.version.git_ref_lookup
|
||||
from spack.util.compression import decompressor_for
|
||||
from spack.util.executable import CommandNotFoundError, which
|
||||
from spack.util.executable import CommandNotFoundError, Executable, which
|
||||
|
||||
#: List of all fetch strategies, created by FetchStrategy metaclass.
|
||||
all_strategies = []
|
||||
@@ -245,38 +246,30 @@ class URLFetchStrategy(FetchStrategy):
|
||||
|
||||
# these are checksum types. The generic 'checksum' is deprecated for
|
||||
# specific hash names, but we need it for backward compatibility
|
||||
optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
|
||||
optional_attrs = [*crypto.hashes.keys(), "checksum"]
|
||||
|
||||
def __init__(self, url=None, checksum=None, **kwargs):
|
||||
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
|
||||
super().__init__(**kwargs)
|
||||
|
||||
# Prefer values in kwargs to the positionals.
|
||||
self.url = kwargs.get("url", url)
|
||||
self.url = url
|
||||
self.mirrors = kwargs.get("mirrors", [])
|
||||
|
||||
# digest can be set as the first argument, or from an explicit
|
||||
# kwarg by the hash name.
|
||||
self.digest = kwargs.get("checksum", checksum)
|
||||
self.digest: Optional[str] = checksum
|
||||
for h in self.optional_attrs:
|
||||
if h in kwargs:
|
||||
self.digest = kwargs[h]
|
||||
|
||||
self.expand_archive = kwargs.get("expand", True)
|
||||
self.extra_options = kwargs.get("fetch_options", {})
|
||||
self._curl = None
|
||||
|
||||
self.extension = kwargs.get("extension", None)
|
||||
|
||||
if not self.url:
|
||||
raise ValueError("URLFetchStrategy requires a url for fetching.")
|
||||
self.expand_archive: bool = kwargs.get("expand", True)
|
||||
self.extra_options: dict = kwargs.get("fetch_options", {})
|
||||
self._curl: Optional[Executable] = None
|
||||
self.extension: Optional[str] = kwargs.get("extension", None)
|
||||
|
||||
@property
|
||||
def curl(self):
|
||||
def curl(self) -> Executable:
|
||||
if not self._curl:
|
||||
try:
|
||||
self._curl = which("curl", required=True)
|
||||
except CommandNotFoundError as exc:
|
||||
tty.error(str(exc))
|
||||
self._curl = web_util.require_curl()
|
||||
return self._curl
|
||||
|
||||
def source_id(self):
|
||||
@@ -297,27 +290,23 @@ def candidate_urls(self):
|
||||
@_needs_stage
|
||||
def fetch(self):
|
||||
if self.archive_file:
|
||||
tty.debug("Already downloaded {0}".format(self.archive_file))
|
||||
tty.debug(f"Already downloaded {self.archive_file}")
|
||||
return
|
||||
|
||||
url = None
|
||||
errors = []
|
||||
errors: List[Exception] = []
|
||||
for url in self.candidate_urls:
|
||||
if not web_util.url_exists(url):
|
||||
tty.debug("URL does not exist: " + url)
|
||||
continue
|
||||
|
||||
try:
|
||||
self._fetch_from_url(url)
|
||||
break
|
||||
except FailedDownloadError as e:
|
||||
errors.append(str(e))
|
||||
|
||||
for msg in errors:
|
||||
tty.debug(msg)
|
||||
errors.extend(e.exceptions)
|
||||
else:
|
||||
raise FailedDownloadError(*errors)
|
||||
|
||||
if not self.archive_file:
|
||||
raise FailedDownloadError(url)
|
||||
raise FailedDownloadError(
|
||||
RuntimeError(f"Missing archive {self.archive_file} after fetching")
|
||||
)
|
||||
|
||||
def _fetch_from_url(self, url):
|
||||
if spack.config.get("config:url_fetch_method") == "curl":
|
||||
@@ -336,27 +325,28 @@ def _check_headers(self, headers):
|
||||
@_needs_stage
|
||||
def _fetch_urllib(self, url):
|
||||
save_file = self.stage.save_filename
|
||||
tty.msg("Fetching {0}".format(url))
|
||||
|
||||
# Run urllib but grab the mime type from the http headers
|
||||
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
|
||||
|
||||
try:
|
||||
url, headers, response = web_util.read_from_url(url)
|
||||
except web_util.SpackWebError as e:
|
||||
response = web_util.urlopen(request)
|
||||
except (TimeoutError, urllib.error.URLError) as e:
|
||||
# clean up archive on failure.
|
||||
if self.archive_file:
|
||||
os.remove(self.archive_file)
|
||||
if os.path.lexists(save_file):
|
||||
os.remove(save_file)
|
||||
msg = "urllib failed to fetch with error {0}".format(e)
|
||||
raise FailedDownloadError(url, msg)
|
||||
raise FailedDownloadError(e) from e
|
||||
|
||||
tty.msg(f"Fetching {url}")
|
||||
|
||||
if os.path.lexists(save_file):
|
||||
os.remove(save_file)
|
||||
|
||||
with open(save_file, "wb") as _open_file:
|
||||
shutil.copyfileobj(response, _open_file)
|
||||
with open(save_file, "wb") as f:
|
||||
shutil.copyfileobj(response, f)
|
||||
|
||||
self._check_headers(str(headers))
|
||||
self._check_headers(str(response.headers))
|
||||
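The urllib path now catches both TimeoutError and URLError, removes any partially downloaded file, and streams the response body to the save file. A sketch of that flow using the standard library directly, with plain urllib in place of Spack's web_util helpers:

# Sketch of the new urllib fetch path: open the URL, stream to the save file,
# and clean up any partial download on failure.
import os
import shutil
import urllib.error
import urllib.request


def fetch_to(url: str, save_file: str) -> None:
    request = urllib.request.Request(url, headers={"User-Agent": "spack-sketch"})
    try:
        response = urllib.request.urlopen(request, timeout=10)
    except (TimeoutError, urllib.error.URLError) as e:
        if os.path.lexists(save_file):
            os.remove(save_file)  # don't leave a partial file behind
        raise RuntimeError(f"failed to fetch {url}") from e

    if os.path.lexists(save_file):
        os.remove(save_file)
    with open(save_file, "wb") as f:
        shutil.copyfileobj(response, f)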
|
||||
@_needs_stage
|
||||
def _fetch_curl(self, url):
|
||||
@@ -365,7 +355,7 @@ def _fetch_curl(self, url):
|
||||
if self.stage.save_filename:
|
||||
save_file = self.stage.save_filename
|
||||
partial_file = self.stage.save_filename + ".part"
|
||||
tty.msg("Fetching {0}".format(url))
|
||||
tty.msg(f"Fetching {url}")
|
||||
if partial_file:
|
||||
save_args = [
|
||||
"-C",
|
||||
@@ -405,8 +395,8 @@ def _fetch_curl(self, url):
|
||||
|
||||
try:
|
||||
web_util.check_curl_code(curl.returncode)
|
||||
except spack.error.FetchError as err:
|
||||
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
|
||||
except spack.error.FetchError as e:
|
||||
raise FailedDownloadError(e) from e
|
||||
|
||||
self._check_headers(headers)
|
||||
|
||||
@@ -473,7 +463,7 @@ def check(self):
|
||||
"""Check the downloaded archive against a checksum digest.
|
||||
No-op if this stage checks code out of a repository."""
|
||||
if not self.digest:
|
||||
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
|
||||
raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
|
||||
|
||||
verify_checksum(self.archive_file, self.digest)
|
||||
|
||||
@@ -484,8 +474,8 @@ def reset(self):
|
||||
"""
|
||||
if not self.archive_file:
|
||||
raise NoArchiveFileError(
|
||||
"Tried to reset URLFetchStrategy before fetching",
|
||||
"Failed on reset() for URL %s" % self.url,
|
||||
f"Tried to reset {self.__class__.__name__} before fetching",
|
||||
f"Failed on reset() for URL{self.url}",
|
||||
)
|
||||
|
||||
# Remove everything but the archive from the stage
|
||||
@@ -498,14 +488,10 @@ def reset(self):
|
||||
self.expand()
|
||||
|
||||
def __repr__(self):
|
||||
url = self.url if self.url else "no url"
|
||||
return "%s<%s>" % (self.__class__.__name__, url)
|
||||
return f"{self.__class__.__name__}<{self.url}>"
|
||||
|
||||
def __str__(self):
|
||||
if self.url:
|
||||
return self.url
|
||||
else:
|
||||
return "[no url]"
|
||||
return self.url
|
||||
|
||||
|
||||
@fetcher
|
||||
@@ -518,7 +504,7 @@ def fetch(self):
|
||||
|
||||
# check whether the cache file exists.
|
||||
if not os.path.isfile(path):
|
||||
raise NoCacheError("No cache of %s" % path)
|
||||
raise NoCacheError(f"No cache of {path}")
|
||||
|
||||
# remove old symlink if one is there.
|
||||
filename = self.stage.save_filename
|
||||
@@ -528,8 +514,8 @@ def fetch(self):
|
||||
# Symlink to local cached archive.
|
||||
symlink(path, filename)
|
||||
|
||||
# Remove link if checksum fails, or subsequent fetchers
|
||||
# will assume they don't need to download.
|
||||
# Remove link if checksum fails, or subsequent fetchers will assume they don't need to
|
||||
# download.
|
||||
if self.digest:
|
||||
try:
|
||||
self.check()
|
||||
@@ -538,12 +524,12 @@ def fetch(self):
|
||||
raise
|
||||
|
||||
# Notify the user how we fetched.
|
||||
tty.msg("Using cached archive: {0}".format(path))
|
||||
tty.msg(f"Using cached archive: {path}")
|
||||
|
||||
|
||||
class OCIRegistryFetchStrategy(URLFetchStrategy):
|
||||
def __init__(self, url=None, checksum=None, **kwargs):
|
||||
super().__init__(url, checksum, **kwargs)
|
||||
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
|
||||
super().__init__(url=url, checksum=checksum, **kwargs)
|
||||
|
||||
self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
|
||||
|
||||
@@ -554,13 +540,13 @@ def fetch(self):
|
||||
|
||||
try:
|
||||
response = self._urlopen(self.url)
|
||||
except urllib.error.URLError as e:
|
||||
except (TimeoutError, urllib.error.URLError) as e:
|
||||
# clean up archive on failure.
|
||||
if self.archive_file:
|
||||
os.remove(self.archive_file)
|
||||
if os.path.lexists(file):
|
||||
os.remove(file)
|
||||
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
|
||||
raise FailedDownloadError(e) from e
|
||||
|
||||
if os.path.lexists(file):
|
||||
os.remove(file)
|
||||
@@ -725,6 +711,7 @@ class GitFetchStrategy(VCSFetchStrategy):
|
||||
"submodules",
|
||||
"get_full_repo",
|
||||
"submodules_delete",
|
||||
"git_sparse_paths",
|
||||
]
|
||||
|
||||
git_version_re = r"git version (\S+)"
|
||||
@@ -740,6 +727,7 @@ def __init__(self, **kwargs):
|
||||
self.submodules = kwargs.get("submodules", False)
|
||||
self.submodules_delete = kwargs.get("submodules_delete", False)
|
||||
self.get_full_repo = kwargs.get("get_full_repo", False)
|
||||
self.git_sparse_paths = kwargs.get("git_sparse_paths", None)
|
||||
|
||||
@property
|
||||
def git_version(self):
|
||||
@@ -807,38 +795,50 @@ def fetch(self):
|
||||
tty.debug("Already fetched {0}".format(self.stage.source_path))
|
||||
return
|
||||
|
||||
self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
|
||||
if self.git_sparse_paths:
|
||||
self._sparse_clone_src(commit=self.commit, branch=self.branch, tag=self.tag)
|
||||
else:
|
||||
self._clone_src(commit=self.commit, branch=self.branch, tag=self.tag)
|
||||
self.submodule_operations()
|
||||
|
||||
def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
|
||||
def bare_clone(self, dest):
|
||||
"""
|
||||
Clone a repository to a path.
|
||||
Execute a bare clone for metadata only
|
||||
|
||||
This method handles cloning from git, but does not require a stage.
|
||||
|
||||
Arguments:
|
||||
dest (str or None): The path into which the code is cloned. If None,
|
||||
requires a stage and uses the stage's source path.
|
||||
commit (str or None): A commit to fetch from the remote. Only one of
|
||||
commit, branch, and tag may be non-None.
|
||||
branch (str or None): A branch to fetch from the remote.
|
||||
tag (str or None): A tag to fetch from the remote.
|
||||
bare (bool): Execute a "bare" git clone (--bare option to git)
|
||||
Requires a destination since bare cloning does not provide source
|
||||
and shouldn't be used for staging.
|
||||
"""
|
||||
# Default to spack source path
|
||||
dest = dest or self.stage.source_path
|
||||
tty.debug("Cloning git repository: {0}".format(self._repo_info()))
|
||||
|
||||
git = self.git
|
||||
debug = spack.config.get("config:debug")
|
||||
|
||||
if bare:
|
||||
# We don't need to worry about which commit/branch/tag is checked out
|
||||
clone_args = ["clone", "--bare"]
|
||||
if not debug:
|
||||
clone_args.append("--quiet")
|
||||
clone_args.extend([self.url, dest])
|
||||
git(*clone_args)
|
||||
elif commit:
|
||||
# We don't need to worry about which commit/branch/tag is checked out
|
||||
clone_args = ["clone", "--bare"]
|
||||
if not debug:
|
||||
clone_args.append("--quiet")
|
||||
clone_args.extend([self.url, dest])
|
||||
git(*clone_args)
|
||||
|
def _clone_src(self, commit=None, branch=None, tag=None):
"""
Clone a repository to a path using git.

Arguments:
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
"""
# Default to spack source path
dest = self.stage.source_path
tty.debug("Cloning git repository: {0}".format(self._repo_info()))

git = self.git
debug = spack.config.get("config:debug")

if commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
clone_args = ["clone", self.url]
@@ -917,6 +917,85 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
git(*pull_args, ignore_errors=1)
git(*co_args)

def _sparse_clone_src(self, commit=None, branch=None, tag=None, **kwargs):
"""
Use git's sparse checkout feature to clone portions of a git repository

Arguments:
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
"""
dest = self.stage.source_path
git = self.git

if self.git_version < spack.version.Version("2.25.0.0"):
# code paths exist where the package is not set. Ensure some identifier for the
# package that was configured for sparse checkout exists in the error message
identifier = str(self.url)
if self.package:
identifier += f" ({self.package.name})"
tty.warn(
(
f"{identifier} is configured for git sparse-checkout "
"but the git version is too old to support sparse cloning. "
"Cloning the full repository instead."
)
)
self._clone_src(commit, branch, tag)
else:
# default to depth=2 to allow for retention of some git properties
depth = kwargs.get("depth", 2)
needs_fetch = branch or tag
git_ref = branch or tag or commit

assert git_ref

clone_args = ["clone"]

if needs_fetch:
clone_args.extend(["--branch", git_ref])

if self.get_full_repo:
clone_args.append("--no-single-branch")
else:
clone_args.append("--single-branch")

clone_args.extend(
[f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
)

sparse_args = ["sparse-checkout", "set"]

if callable(self.git_sparse_paths):
sparse_args.extend(self.git_sparse_paths())
else:
sparse_args.extend([p for p in self.git_sparse_paths])

sparse_args.append("--cone")

checkout_args = ["checkout", git_ref]

if not spack.config.get("config:debug"):
clone_args.insert(1, "--quiet")
checkout_args.insert(1, "--quiet")

with temp_cwd():
git(*clone_args)
repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)

with working_dir(dest):
git(*sparse_args)
git(*checkout_args)

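For context, a hypothetical package definition showing how git_sparse_paths would reach this code path via _from_merged_attrs; the package name, URL, and paths below are made up and not taken from the diff.

from spack.package import *


class MyLargeRepo(Package):
    """Hypothetical package: only the listed directories are sparse-checked out."""

    git = "https://example.com/my-large-repo.git"
    git_sparse_paths = ["cmake", "src/core"]  # forwarded to GitFetchStrategy by _from_merged_attrs

    version("1.0", tag="v1.0")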
def submodule_operations(self):
dest = self.stage.source_path
git = self.git

if self.submodules_delete:
with working_dir(dest):
for submodule_to_delete in self.submodules_delete:
@@ -1293,7 +1372,7 @@ def reset(self):
shutil.move(scrubbed, source_path)

def __str__(self):
return "[hg] %s" % self.url
return f"[hg] {self.url}"


@fetcher
@@ -1302,45 +1381,20 @@ class S3FetchStrategy(URLFetchStrategy):

url_attr = "s3"

def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("S3FetchStrategy requires a url for fetching.")

@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
return

parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

tty.debug("Fetching {0}".format(self.url))

basename = os.path.basename(parsed_url.path)

with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)

with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)

content_type = web_util.get_header(headers, "Content-type")

if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")

if self.stage.save_filename:
llnl.util.filesystem.rename(
os.path.join(self.stage.path, basename), self.stage.save_filename
if not self.url.startswith("s3://"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from s3:// urls."
)

if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
return
self._fetch_urllib(self.url)
if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)

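Both object-store strategies now validate the URL scheme and delegate to the shared urllib-based fetch path. A simplified, hypothetical sketch of that flow (not code from the diff; the bucket URL is made up):

import spack.fetch_strategy as fs

# Hypothetical sketch: after this change the S3 and GCS fetchers follow the same pattern.
fetcher = fs.S3FetchStrategy(url="s3://my-bucket/foo-1.0.tar.gz")
# fetcher.fetch() would:
#   1. reject URLs whose scheme is not s3:// (or gs:// for GCSFetchStrategy)
#   2. return early if the archive is already downloaded
#   3. call self._fetch_urllib(self.url) and verify the archive exists afterwards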

@fetcher
@@ -1349,43 +1403,22 @@ class GCSFetchStrategy(URLFetchStrategy):

url_attr = "gs"

def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("GCSFetchStrategy requires a url for fetching.")

@_needs_stage
def fetch(self):
if not self.url.startswith("gs"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from gs:// urls."
)
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
tty.debug(f"Already downloaded {self.archive_file}")
return

parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

tty.debug("Fetching {0}".format(self.url))

basename = os.path.basename(parsed_url.path)

with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)

with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)

content_type = web_util.get_header(headers, "Content-type")

if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")

if self.stage.save_filename:
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
self._fetch_urllib(self.url)

if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)


@fetcher
@@ -1394,7 +1427,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
as well as after expanding it."""

def __init__(self, url, archive_sha256: str, expanded_sha256: str):
super().__init__(url, archive_sha256)
super().__init__(url=url, checksum=archive_sha256)
self.expanded_sha256 = expanded_sha256

def expand(self):
@@ -1436,14 +1469,14 @@ def stable_target(fetcher):
return False


def from_url(url):
def from_url(url: str) -> URLFetchStrategy:
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.

TODO: make this return appropriate fetch strategies for other
types of URLs.
"""
return URLFetchStrategy(url)
return URLFetchStrategy(url=url)

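Illustrative only, not part of the diff: URLFetchStrategy and its helpers are now constructed with keyword arguments, so call sites look roughly like the sketch below; the URL and checksum are made up.

import spack.fetch_strategy as fs

# Made-up URL and placeholder sha256 for illustration.
fetcher = fs.from_url("https://example.com/foo-1.0.tar.gz")  # returns URLFetchStrategy(url=...)
pinned = fs.URLFetchStrategy(
    url="https://example.com/foo-1.0.tar.gz",
    checksum="a3f5...0c9d",  # placeholder sha256 digest
)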

def from_kwargs(**kwargs):
@@ -1512,10 +1545,12 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
except spack.package_base.NoURLError:
msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
raise ExtrapolationError(msg % (version, pkg.name))
raise ExtrapolationError(
f"Can't extrapolate a URL for version {version} because "
f"package {pkg.name} defines no URLs"
)


def _from_merged_attrs(fetcher, pkg, version):
@@ -1532,8 +1567,11 @@ def _from_merged_attrs(fetcher, pkg, version):
attrs["fetch_options"] = pkg.fetch_options
attrs.update(pkg.versions[version])

if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
attrs.setdefault("submodules", pkg.submodules)
if fetcher.url_attr == "git":
pkg_attr_list = ["submodules", "git_sparse_paths"]
for pkg_attr in pkg_attr_list:
if hasattr(pkg, pkg_attr):
attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))

return fetcher(**attrs)

@@ -1628,11 +1666,9 @@ def for_package_version(pkg, version=None):
raise InvalidArgsError(pkg, version, **args)


def from_url_scheme(url, *args, **kwargs):
def from_url_scheme(url: str, **kwargs):
"""Finds a suitable FetchStrategy by matching its url_attr with the scheme
in the given url."""

url = kwargs.get("url", url)
parsed_url = urllib.parse.urlparse(url, scheme="file")

scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1649,11 +1685,9 @@ def from_url_scheme(url, *args, **kwargs):
for fetcher in all_strategies:
url_attr = getattr(fetcher, "url_attr", None)
if url_attr and url_attr == scheme:
return fetcher(url, *args, **kwargs)
return fetcher(url=url, **kwargs)

raise ValueError(
'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
)
raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')

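A small, hypothetical usage sketch of the simplified from_url_scheme; the URL is made up.

import spack.fetch_strategy as fs

# The "s3" scheme matches S3FetchStrategy.url_attr, so that fetcher is
# constructed with the url keyword only.
fetcher = fs.from_url_scheme("s3://my-bucket/foo-1.0.tar.gz")
print(type(fetcher).__name__)  # expected: S3FetchStrategy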

def from_list_url(pkg):
@@ -1678,7 +1712,9 @@ def from_list_url(pkg):
)

# construct a fetcher
return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
return URLFetchStrategy(
url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1706,10 +1742,10 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)

def fetcher(self, target_path, digest, **kwargs):
def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
path = os.path.join(self.root, target_path)
url = url_util.path_to_file_url(path)
return CacheURLFetchStrategy(url, digest, **kwargs)
return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)

def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)
@@ -1722,9 +1758,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
"""Raised when a download fails."""

def __init__(self, url, msg=""):
super().__init__("Failed to fetch file from URL: %s" % url, msg)
self.url = url
def __init__(self, *exceptions: Exception):
super().__init__("Failed to download")
self.exceptions = exceptions

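Illustrative sketch of the new error contract (the URL is made up): call sites now wrap the underlying exception rather than passing a URL and message, matching the updated raise statements earlier in this diff.

import urllib.error
import urllib.request

from spack.fetch_strategy import FailedDownloadError

# Wrap the low-level exception; FailedDownloadError stores it in .exceptions.
try:
    urllib.request.urlopen("https://example.com/foo-1.0.tar.gz")
except urllib.error.URLError as e:
    raise FailedDownloadError(e) from e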

class NoArchiveFileError(spack.error.FetchError):

@@ -37,6 +37,12 @@ def __call__(self, spec):
"""Run this hash on the provided spec."""
return spec.spec_hash(self)

def __repr__(self):
return (
f"SpecHashDescriptor(depflag={self.depflag!r}, "
f"package_hash={self.package_hash!r}, name={self.name!r}, override={self.override!r})"
)


#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")

@@ -23,9 +23,6 @@ def post_install(spec, explicit):

# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
bindist.push_or_raise(
spec,
mirror.push_url,
bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
)
signing_key = bindist.select_signing_key() if mirror.signed else None
bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")