Compare commits
develop-20 ... develop-20
904 commits
Commit SHA1s:

```
12e3665df3 fa4778b9fc 66d297d420 56251c11f3 40bf9a179b 095aba0b9f 4270136598 f73d7d2dce
567566da08 30a9ab749d 8160a96b27 10414d3e6c 1d96c09094 e7112fbc6a b79761b7eb 3381899c69
c7cf5eabc1 d88fa5cf8e 2ed0e3d737 506a40cac1 447739fcef e60f6f4a6e 7df35d0da0 71b035ece1
86a134235e 24cd0da7fb 762833a663 636d479e5f f2184f26fa e1686eef7c 314893982e 9ab6c30a3d
ddf94291d4 5d1038c512 2e40c88d50 2bcba57757 37330e5e2b b4411cf2db 65d1ae083c 0b8faa3918
f077c7e33b 9d7410d22e e295730d0e 868327ee14 f5430b16bc 2446695113 e0c6cca65c 84ed4cd331
f6d50f790e d3c3d23d1e 33752c2b55 26759249ca 8b4cbbe7b3 be71f9fdc4 05c1e7ecc2 f7afd67a26
d22bdc1c4e 540f9eefb7 2db5bca778 bcd05407b8 b35ec605fe 0a353abc42 e178c58847 d7297e67a5
ee8addf04a fd3cd3a1c6 e585aeb883 1f43384db4 814b328fca 125206d44d a081b875b4 a16ee3348b
d654d6b1f4 9b4ca0be40 dc71dcfdc2 1f31c3374c 27aeb6e293 715214c1a1 b471d62dbd a5f62889ca
2a942d98e3 4a4077d4ef c0fcccc232 0b2cbfefce c499514322 ae392b5935 62e9bb5d51 6cd948184e
44ff24f558 c657dfb768 f2e410d95a df443a38d6 74fe498cb8 5f13a48bf2 c4824f7fd2 49a8634584
eac5ea869f f5946c4621 8564ab19c3 aae7a22d39 09cea230b4 a1f34ec58b 4d7cd4c0bf 4adbfa3835
8a1b69c1d3 a1d69f8661 e05dbc529e 99d33bf1f2 bd1918cd71 2a967c7df4 7596aac958 c73ded8ed6
df1d783035 47051c3690 3fd83c637d ef5afb66da ecc4336bf9 d2ed217796 272c7c069a 23f16041cd
e2329adac0 4ec788ca12 c1cea9d304 5c96e67bb1 7008bb6335 14561fafff 89bf1edb6e cc85dc05b7
ae171f8b83 578dd18b34 a7a51ee5cf 960cc90667 dea44bad8b e37870ff43 3751642a27 0f386697c6
67ce103b2c a8c9fa0e45 b56a133fce f0b3d33145 32564da9d0 8f2faf65dc 1d59637051 97dc353cb0
beebe2c9d3 2eb7add8c4 a9fea9f611 9b62a9c238 f7eb0ccfc9 a0aa35667c b1d4fd14bc 7e8415a3a6
7f4f42894d 4e876b4014 77a8a4fe08 597e5a4e5e 3c31c32f62 3a93a716e4 82229a0784 5d846a69d1
d21aa1cc12 7896ff51f6 5849a24a74 38c49d6b82 0d8900986d 62554cebc4 067155cff5 08e68d779f
05b04cd4c3 be48f762a9 de5b4840e9 20f9884445 deb78bcd93 06239de0e9 1f904c38b3 f2d0ba8fcc
49d3eb1723 7c5439f48a 7f2cedd31f d47951a1e3 f2bd0c5cf1 4362382223 ba4859b33d e8472714ef
ee6960e53e dad266c955 7a234ce00a a0c2ed97c8 a3aa5b59cd f7dbb59d13 0df27bc0f7 877e09dcc1
c4439e86a2 aa00dcac96 4c9a946b3b 0c6e6ad226 bf8f32443f c2eef8bab2 2df4b307d7 3c57440c10
3e6e9829da 859745f1a9 ddabb8b12c 16bba32124 7d87369ead 7723bd28ed 43f3a35150 ae9f2d4d40
5a3814ff15 946c539dbd 0037462f9e e5edac4d0c 3e1474dbbb 0f502bb6c3 1eecbd3208 6e92b9180c
ac9012da0c e3cb4f09f0 2e8600bb71 d946c37cbb 47a9f0bdf7 2bf900a893 99bba0b1ce a8506f9022
4a40a76291 fe9ddf22fc 1cae1299eb 8b106416c0 e2088b599e 56446685ca 47a8d875c8 56b2d250c1
abbd09b4b2 9e5fdc6614 1224a3e8cf 6c3218920f 02cc3ea005 641ab95a31 e8b76c27e4 0dbe4d54b6
1eb6977049 3f1cfdb7d7 d438d7993d aa0825d642 978c20f35a d535124500 01f61a2eba 7d5e27d5e8
d210425eef 6be07da201 02b38716bf d7bc624c61 b7cecc9726 393a2f562b 682fcec0b2 d6baae525f
e1f2612581 080fc875eb 69f417b26a 80b5106611 34146c197a 209a3bf302 e8c41cdbcb a450dd31fa
7c1a309453 78b6fa96e5 1b315a9ede 82df0e549d f5591f9068 98c08d277d facca4e2c8 764029bcd1
44cb4eca93 39888d4df6 f68ea49e54 78b5e4cdfa 26515b8871 74640987c7 d6154645c7 faed43704b
6fba31ce34 112cead00b 9e2558bd56 019058226f ac0040f67d 38f341f12d 26ad22743f 46c2b8a565
5cbb59f2b8 f29fa1cfdf c69951d6e1 f406f27d9c 36ea208e12 17e0774189 3162c2459d 7cad6c62a3
eb2ddf6fa2 2bc2902fed b362362291 32bb5c7523 a2b76c68a0 62132919e1 b06929f6df 0f33de157b
03a074ebe7 4d12b6a4fd 26bb15e1fb 1bf92c7881 eefe0b2eec de6c6f0cd9 309d3aa1ec feff11f914
de3b324983 747cd374df 8b3ac40436 28e9be443c 1381bede80 6502785908 53257408a3 28d02dff60
9d60b42a97 9ff5a30574 9a6c013365 9f62a3e819 e380e9a0ab 8415ea9ada 6960766e0c 0c2ca8c841
273960fdbb 0cd2a1102c e40676e901 4ddb07e94f 50585d55c5 5d6b5f3f6f 2351c19489 08d49361f0
c3c63e5ca4 e72d4075bd f9f97bf22b 8033455d5f 50a5a6fea4 0de8a0e3f3 0a26e74cc8 9dfd91efbb
1a7baadbff afcfd56ae5 7eb2e704b6 564b4fa263 0a941b43ca 35ff24ddea 7019e4e3cb cb16b8a047
381acb3726 d87ea0b256 1a757e7f70 704e2c53a8 478d8a668c 7903f9fcfd 670d3d3fdc e8aab6b31c
1ce408ecc5 dc81a2dcdb b10f51f020 4f4e3f5607 00fb80e766 057603cad8 5b8b6e492d 763279cd61
e4237b9153 d288658cf0 2c22ae0576 fc3fc94689 b5013c1372 e220674c4d 7f13518225 96a13a97e6
6d244b3f67 6bc66db141 acfb2b9270 d92a2c31fb e32561aff6 4b0479159f 03bfd36926 4d30c8dce4
49d4104f22 07fb83b493 263007ba81 3b6e99381f a30af1ac54 294742ab7b 6391559fb6 d4d4f813a9
4667163dc4 439f105285 f65b1fd7b6 d23e06c27e b76e9a887b 55ffd439ce d8a7b88e7b aaa1bb1d98
0d94b8044b 5a52780f7c dd0a8452ee c467bba73e d680a0cb99 efadee26ef 2077b3a006 8e0c659b51
863ab5a597 db4e76ab27 6728a46a84 5a09459dd5 7e14ff806a 7e88cf795c 1536e3d422 1fe8e63481
dfca2c285e 2686f778fa 925e9c73b1 aba447e885 1113de0dad 4110225166 24c839c837 42c6a6b189
b0ea1c6f24 735102eb2b 2e3cdb349b 05c8030119 bbcd4224fa 4c0cdb99b3 f22d009c6d c5a3e36ad0
1c76ba1c3e b969f739bd 4788c4774c 34de028dbc a69254fd79 af5f205759 77f9100a59 386bb71392
0676d6457f 0b80e36867 4c9816f10c fb6741cf85 3f2fa256fc d5c8864942 b3cef1072d e8ae9a403c
1a8ef161c8 d3913938bc 4179880fe6 125dd0368e fd68f8916c 93e6f5fa4e 54acda3f11 663e20fcc4
6428132ebb 171958cf09 0d0f7ab030 35f8b43a54 6f7eb3750c 2121eb31ba c68d739825 c468697b35
c4094cf051 9ff9ca61e6 826e0c0405 1b86a842ea 558a28bf52 411576e1fa cab4f92960 c6c13f6782
cf11fab5ad 1d8b35c840 5dc46a976d 05f5596cdd 6942c7f35b 18f0ac0f94 d9196ee3f8 ef0bb6fe6b
3fed320013 1aa77e695d 3a0efeecf1 5ffb5657c9 2b3e7fd10a cb315e18f0 10c637aca0 fb4e1cad45
3054b71e2e 47163f7435 e322a8382f 53fb4795ca 4517c7fa9b efaed17f91 2c17cd365d dfe537f688
be0002b460 743ee5f3de b6caf0156f ec00ffc244 f020256b9f 04377e39e0 ba2703fea6 92b1c8f763
2b29ecd9b6 5b43bf1b58 37d9770e02 0e016ba6f5 7afa949da1 b81d7d0aac e78484f501 6fd43b4e75
14edb55288 f062f1c5b3 7756c8f4fc 69c8a9e4ba 47c0736952 8b89287084 8bd6283b52 179e4f3ad1
e97787691b 5932ee901c 3bdebeba3c d390ee1902 4f9fe6f9bf df6d6d9b5c e57d33b29f 85c6d6dbab
5f9228746e 9f2451ddff a05eb11b7b ae2d0ff1cd 7e906ced75 647e89f6bc 3239c29fb0 abced0e87d
300fc2ee42 13c4258e54 f29cb7f953 826b8f25c5 ebaeea7820 f76eb993aa 0b2c370a83 6a9ee480bf
cc80d52b62 b9c7d3b89b c1be6a5483 42550208c3 be231face6 89ac747a76 5d8f36d667 6c3fed351f
b9cbd15674 b8f633246a a2f3e98ab9 acffe37313 249e5415e8 e2a942d07e 32deca2a4c e4c64865f1
1175f37577 faa183331f bbac33871c 6d4dd33c46 579bad05a8 27a8eb0f68 4cd993070f 4c55c6a268
a4a27fb1e4 66345e7185 8f76f1b0d8 4cab6f3af5 0d4665583b 5d0ef9e4f4 e145baf619 6c912b30a2
f4da453f6b 7e9caed8c2 69509a6d9a 0841050d20 321ffd732b 22922323e3 0b5b192c18 1275c57d88
29a39ac6a0 ae9c86a930 83199a981d ed40c3210e be96460ab2 95caf55fe7 960af24270 899bef2aa8
f0f092d9f1 6eaac2270d a9f3f6c007 08a04ebd46 d8e642ecb7 669ed69d8e 7ebb21a0da 93ffa9ba5d
e5fdb90496 303a0b3653 9f07544bde 9b046a39a8 0c9a53ba3a 1fd4353289 fcb8ed6409 2f11862832
bff11ce8e7 218693431c e036cd9ef6 cd5bef6780 159e9a20d1 99bb288db7 99744a766b ddd8be51a0
bba66b1063 1c3c21d9c7 cbe9b3d01c 0abf5ba43c 9ab3c1332b b6425da50f 937a4dbf69 cd779ee54d
7ddcb13325 7666046ce3 8e89e61402 d0dbfaa5d6 26f562b5a7 2967804da1 c3eaf4d6cf 397334a4be
434836be81 7b9b976f40 4746e8a048 69c684fef9 2314aeb884 d33e10a695 7668a0889a d7a74bde9f
fedf8128ae f70af2cc57 50562e6a0e 4ac51b2127 81c9e346dc 73e16a7881 af8868fa47 cfd4e356f8
fc87dcad4c 65472159c7 d1f9d8f06d 67ac9c46a8 aa39465188 09810a5e7c 446c0f2325 c4ce51c9be
1f63a764ac 384e198304 2303332415 0eb1957999 de1f9593c6 65fa71c1b4 9802649716 8d9d721f07
ecef72c471 485b6e2170 ba02c6b70f 7028669d50 2f0a73f7ef 7cb0dbf77a ac8800ffc7 eb11fa7d18
4d8381a775 de5e20fc21 c33af49ed5 3addda6c4d 33f6f55d6b 41d20d3731 dde8fa5561 588a94bc8c
06392f2c01 f16e29559e ea96403157 b659eac453 ab590cc03a 1a007a842b 9756354998 3984dd750c
d5c1e16e43 56ace9a087 6e0bab1706 193386f6ac 755131fcdf 9a71733adb cd919d51ea 12adf66d07
c02f58da8f 9662d181a0 282df7aecc b4c0e6f03b 4cd8488139 69a052841c a3f39890c2 02d126ce2b
339a63370f fef6aed627 3445da807e 429c3598af 3d8136493a 8cd160db85 a7dd756b34 53be280681
5ab10d57be 96061d2c00 e78d20dc84 6d2341c109 968ad02473 b93882804f f58ebd4fbb 6f7f9528e5
59c7ff8683 4495e0341d ba39924046 751c3fef86 102811adb9 8f56eb620f ec517b40e9 22cb3815fe
f549354f78 dc212d0e59 8f14acb139 c38ef72b06 7d67d9ece4 2c30962c74 cc28334049 dbdf5bacc4
531f01f0b9 794593b478 afcf0d2e39 29ee861366 b1a984ef02 cc545d8c9a 49ff816fb0 8c33841567
21b50fbbe3 2a8e503a04 4b695d4722 2fa816184e 25f622e809 7506acabe7 3a828358cb 94a1d1414a
0f080b38f4 f1ec4859c8 63baba0308 aeec861544 e54d4678f9 187b8adb4f d6fd96f024 e3b6d2c3c7
1e9c46296c 48183b37be 9a3d248348 03e22adb5b 5f5fc78236 e12a8a69c7 001af62585 f5e89df6f2
ce75adada6 24d37df1a2 a9d294c532 9dcaa56db4 98162aa2e1 3934df622c dbf5d79557 97e29e501d
258c651a8f 43ca6da346 9786bd932b c72619d4db 8ecae17c46 1e47ccb83a d6421a69eb 000dff2fd4
1e413477dd 8955e63a68 bf14b424bb 14209a86a6 b7d9900764 bc155e7b90 65f9ba345f ca49bc5652
b84b85a7e0 016cdba16f 4806e6549f c14b277150 919025d9f3 52f57c90eb ee1fa3e50c 772928241b
7440bb4c36 c464866deb 799a8a5090 c218ee50e9 8ff7a20320 e3fe6bc0f7 c6fcb1068f 54ac3e72ed
274fbebc4c d40eb19918 31de670bd2 6c0961549b c090bc5ebe bca4d37d76 9b484d2eea a57b0e1e2d
e3cb3b29d9 ac48ecd375 0bb20d34db 971fda5c33 dcc4423a9d 82c380b563 8bcf6a31ae ddd88e266a
08c597d83e bf5340755d f8e70a0c96 7e468aefd5 e685d04f84 9d962f55b0 00d3066b97 5ca0dcecb2
fa8fb7903b f35ff441f2 9ea9ee05c8 24ddc49c1b 2f4266161c 7bcb0fff7d cd332c6370 6daf9677f3
cb6450977d bf62ac0769 0223fe746b 12fba13441 0c44f5a140 f4853790c5 9ed2e396f4 3ee6fc937e
c9b6cc9a58 58b394bcec 4d89eeca9b bfc71e9dae f061dcda74 cc460894fd 5e09660e87 5a8efb3b14
99002027c4 a247879be3 7b46993fed dd59f4ba34 18ab14e659 28eb5e1bf6 c658ddbfa3 12963c894f
61fa12508f daf6acef6e d30621e787 dd4b365608 157d47fc5a 13daa1b692 f923e650f9 1a1bbb8af2
594fcc3c80 76ec19b26e 00baaf868e 3b06347f65 5b9e207db2 d6fd9017c4 913d79238e 250038fa9b
26c553fce7 e24c242fb7 ca14ce2629 44f443946c 6e6bc89bda 8714ea6652 df92f0a7d4 d24b91157c
1a0f77388c 34571d4ad6 a574f40732 d4ffe244af e08e66ad89 0543710258 5d994e48d5 d1fa23e9c6
f1db8b7871 c5cca54c27 a9c1648db8 3bd911377e fcb2f7d3aa a8a9e0160a 9ca6aaeafd aed5c39312
```
**.devcontainer/devcontainer.json** (new file, 4 lines added)

```diff
@@ -0,0 +1,4 @@
+{
+  "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
+  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+}
```
**.devcontainer/postCreateCommand.sh** (new executable file, 20 lines added)

```diff
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Load spack environment at terminal startup
+cat <<EOF >> /root/.bashrc
+. /workspaces/spack/share/spack/setup-env.sh
+EOF
+
+# Load spack environment in this script
+. /workspaces/spack/share/spack/setup-env.sh
+
+# Ensure generic targets for maximum matching with buildcaches
+spack config --scope site add "packages:all:require:[target=x86_64_v3]"
+spack config --scope site add "concretizer:targets:granularity:generic"
+
+# Find compiler and install gcc-runtime
+spack compiler find --scope site
+
+# Setup buildcaches
+spack mirror add --scope site develop https://binaries.spack.io/develop
+spack buildcache keys --install --trust
```
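Taken together, the two devcontainer files give a codespace that resolves packages against Spack's public binary mirror. A minimal sketch of sanity-checking that wiring from a shell inside the container (assuming the post-create hook ran; `zlib` is just an arbitrary small package):

```bash
#!/bin/bash
# Load the Spack shell integration, exactly as postCreateCommand.sh does.
. /workspaces/spack/share/spack/setup-env.sh

# The hook registered the 'develop' buildcache; it should appear here.
spack mirror list

# Concretize a small package; with generic x86_64_v3 targets configured,
# the solve can match prebuilt binaries from the mirror.
spack spec zlib
```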
**.github/workflows/audit.yaml** (42 changes)

```diff
@@ -17,33 +17,53 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
+    runs-on: ${{ matrix.system.os }}
     strategy:
       matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+        system:
+          - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
+          - { os: ubuntu-latest, shell: bash }
+          - { os: macos-latest, shell: bash }
+    defaults:
+      run:
+        shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml]
+      - name: Setup for Windows run
+        if: runner.os == 'Windows'
+        run: |
+          python -m pip install --upgrade pywin32
       - name: Package audits (with coverage)
-        if: ${{ inputs.with_coverage == 'true' }}
+        if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
           coverage run $(which spack) audit packages
-          coverage run $(which spack) audit externals
+          coverage run $(which spack) -d audit externals
           coverage combine
           coverage xml
       - name: Package audits (without coverage)
-        if: ${{ inputs.with_coverage == 'false' }}
+        if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
-          $(which spack) audit packages
-          $(which spack) audit externals
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab  # @v2.1.0
+          spack -d audit packages
+          spack -d audit externals
+      - name: Package audits (without coverage)
+        if: ${{ runner.os == 'Windows' }}
+        run: |
+          . share/spack/setup-env.sh
+          spack -d audit packages
+          ./share/spack/qa/validate_last_exit.ps1
+          spack -d audit externals
+          ./share/spack/qa/validate_last_exit.ps1
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         if: ${{ inputs.with_coverage == 'true' }}
         with:
           flags: unittests,audits
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
```
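The audit job now shares one step list across Windows, Ubuntu, and macOS by selecting the shell per matrix entry. A sketch of running the same non-coverage audit commands locally, using only commands that appear in the workflow (assumes a POSIX shell at the root of a Spack checkout):

```bash
#!/bin/bash
# Mirror of the "Package audits (without coverage)" step on POSIX runners.
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit externals
```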
**.github/workflows/bootstrap-test.sh** (5 changes)

```diff
@@ -1,7 +1,8 @@
 #!/bin/bash
-set -ex
+set -e
 source share/spack/setup-env.sh
+$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack -d solve zlib
+$PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
 exit 0
```
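Replacing the hard-coded `-d` with `$SPACK_FLAGS` lets the caller choose the verbosity. The workflow drives this script through `bin/spack-tmpconfig` (see the loop in bootstrap.yml below); the exported values in this sketch are illustrative assumptions, not taken from the diff:

```bash
#!/bin/bash
# Hypothetical invocation of the bootstrap test script.
export PYTHON=/usr/bin/python3   # assumed interpreter path
export SPACK_FLAGS="-d"          # assumed; the diff does not show where this is set
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
```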
**.github/workflows/bootstrap.yml** (346 changes)

```diff
@@ -13,118 +13,22 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  fedora-clingo-sources:
+  distros-clingo-sources:
     runs-on: ubuntu-latest
-    container: "fedora:latest"
+    container: ${{ matrix.image }}
+    strategy:
+      matrix:
+        image: ["fedora:latest", "opensuse/leap:latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup Fedora
+        if: ${{ matrix.image == 'fedora:latest' }}
         run: |
           dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+            bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            cmake bison
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-binaries-and-patchelf:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  opensuse-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "opensuse/leap:latest"
-    steps:
-      - name: Install dependencies
+      - name: Setup OpenSUSE
+        if: ${{ matrix.image == 'opensuse/leap:latest' }}
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
@@ -133,15 +37,9 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - name: Setup repo
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          git --version
-          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -151,77 +49,102 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/

-  macos-clingo-sources:
-    runs-on: macos-latest
+  clingo-sources:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install cmake bison@2.7 tree
+          brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
-          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/

-  macos-clingo-binaries:
-    runs-on: ${{ matrix.macos-version }}
+  gnupg-sources:
+    runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        macos-version: ['macos-11', 'macos-12']
+        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
           brew install tree
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - name: Bootstrap clingo
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
         run: |
-          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
-            not_found=1
-            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
-            if [[ -d "$ver_dir" ]] ; then
-              if $ver_dir/python --version ; then
-                export PYTHON="$ver_dir/python"
-                not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
-                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
-                export PATH="$old_path"
-              fi
-            fi
-            # NOTE: test all pythons that exist, not all do on 12
-          done
-
-  ubuntu-clingo-binaries:
-    runs-on: ubuntu-20.04
-    steps:
+          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
       - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - name: Setup repo
+      - name: Bootstrap GnuPG
         run: |
-          git --version
-          . .github/workflows/setup_git.sh
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  from-binaries:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+    steps:
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: |
+          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+      - name: Checkout
+        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: |
+            3.8
+            3.9
+            3.10
+            3.11
+            3.12
+      - name: Set bootstrap sources
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
       - name: Bootstrap clingo
         run: |
-          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+          set -e
+          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
             if [[ -d "$ver_dir" ]] ; then
+              echo "Testing $ver_dir"
               if $ver_dir/python --version ; then
                 export PYTHON="$ver_dir/python"
                 not_found=0
@@ -236,122 +159,9 @@ jobs:
             exit 1
           fi
         done
-
-  ubuntu-gnupg-binaries:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-gnupg-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            gawk
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-binaries:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-sources:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install gawk tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-
-# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
-# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
-# See:
-# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-# - https://github.com/actions/checkout/issues/760
-# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
```
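Several of the deleted container jobs repeated the same CVE-2022-24765 workaround described in the trailing comment. A condensed sketch of that removed pattern, assembled only from commands visible in the deleted steps:

```bash
#!/bin/bash
# Distro-patched git refuses to touch a repo owned by another user,
# so the checkout is first marked safe for all users.
git config --global --add safe.directory /__w/spack/spack

# The actual bootstrap then runs as an unprivileged user.
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
runuser -u spack-test -- bash -c 'source share/spack/setup-env.sh && spack -d solve zlib'
```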
**.github/workflows/build-containers.yml** (30 changes)

```diff
@@ -45,17 +45,18 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+          [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -87,19 +88,19 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
         with:
-          name: dockerfiles
+          name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles

       - name: Set up QEMU
         uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -107,16 +108,27 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56
+        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker_meta.outputs.tags }}
           labels: ${{ steps.docker_meta.outputs.labels }}
+
+  merge-dockerfiles:
+    runs-on: ubuntu-latest
+    needs: deploy-images
+    steps:
+      - name: Merge Artifacts
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
+        with:
+          name: dockerfiles
+          pattern: dockerfiles_*
+          delete-merged: true
```
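Uploading per-leg artifacts (`dockerfiles_<name>`) and merging them afterwards matches upload-artifact v4's rule that each artifact name may only be written once. A hedged sketch of retrieving the merged artifact with the GitHub CLI (the run id is a placeholder):

```bash
#!/bin/bash
# Fetch the merged "dockerfiles" artifact produced by merge-dockerfiles.
# 123456789 stands in for a real workflow run id.
gh run download 123456789 --name dockerfiles --dir ./dockerfiles
```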
**.github/workflows/ci.yaml** (8 changes)

```diff
@@ -18,6 +18,7 @@ jobs:
   prechecks:
     needs: [ changes ]
     uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
   all-prechecks:
@@ -35,12 +36,12 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
      # For pull requests it's not necessary to checkout the code
-      - uses: dorny/paths-filter@ebc4d7e9ebcb0b1eb21480bb8f43113e996ac77a
+      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
        id: filter
        with:
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
@@ -70,14 +71,17 @@ jobs:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
+    secrets: inherit
   unit-tests:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
+    secrets: inherit
   windows:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks ]
     uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
     needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
```
**.github/workflows/nightly-win-builds.yml** (4 changes)

```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: 3.9
       - name: Install Python packages
```
**.github/workflows/style/requirements.txt** (2 changes)

```diff
@@ -1,4 +1,4 @@
-black==24.2.0
+black==24.4.2
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
```
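To reproduce the pinned style environment locally:

```bash
#!/bin/bash
# Install the pinned style toolchain used by the workflows.
pip install -r .github/workflows/style/requirements.txt
black --version   # should report 24.4.2 after this bump
```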
**.github/workflows/unit_tests.yaml** (37 changes)

```diff
@@ -51,10 +51,10 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install System packages
@@ -91,17 +91,19 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
   # Test shell integration
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
       - name: Install System packages
@@ -122,9 +124,11 @@ jobs:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: shelltests,linux
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true

   # Test RHEL8 UBI with platform Python. This job is run
   # only on PRs modifying core Spack
@@ -137,7 +141,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -156,10 +160,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
       - name: Install System packages
@@ -181,20 +185,23 @@ jobs:
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab  # @v2.1.0
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: unittests,linux,clingo
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
   # Run unit tests on MacOS
   macos:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
+        os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install Python packages
@@ -216,6 +223,8 @@ jobs:
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: unittests,macos
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
```
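All of the Linux unit-test variants funnel through one QA script, selecting the solver via an environment variable. The local equivalent of the clingo-cffi job's test step, using only names from the workflow:

```bash
#!/bin/bash
# Local equivalent of the clingo-cffi unit-test step.
export SPACK_TEST_SOLVER=clingo
share/spack/qa/run-unit-tests
```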
**.github/workflows/valid-style.yml** (11 changes)

```diff
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
           cache: 'pip'
@@ -56,6 +56,7 @@ jobs:
       share/spack/qa/run-style-tests
   audit:
     uses: ./.github/workflows/audit.yaml
+    secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.11'
@@ -69,7 +70,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11  # @v2
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
       - name: Setup repo and non-root user
         run: |
           git --version
```
.github/workflows/windows_python.yml (20 changes, vendored)
@@ -15,10 +15,10 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -33,16 +33,18 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -57,16 +59,18 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-      - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
         with:
           flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: 3.9
       - name: Install Python packages
CHANGELOG.md (45 changes)
@@ -1,3 +1,48 @@
+# v0.21.2 (2024-03-01)
+
+## Bugfixes
+
+- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
+- Fix setup-env script, when going back and forth between instances (#40924)
+- Fix using fully-qualified namespaces from root specs (#41957)
+- Fix a bug when a required provider is requested for multiple virtuals (#42088)
+- OCI buildcaches:
+  - only push in parallel when forking (#42143)
+  - use pickleable errors (#42160)
+- Fix using sticky variants in externals (#42253)
+- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
+
+## Package updates
+- rust: add v1.75, rework a few variants (#41161,#41903)
+- py-transformers: add v4.35.2 (#41266)
+- mgard: fix OpenMP on AppleClang (#42933)
+
+# v0.21.1 (2024-01-11)
+
+## New features
+- Add support for reading buildcaches created by Spack v0.22 (#41773)
+
+## Bugfixes
+
+- spack graph: fix coloring with environments (#41240)
+- spack info: sort variants in --variants-by-name (#41389)
+- Spec.format: error on old style format strings (#41934)
+- ASP-based solver:
+  - fix infinite recursion when computing concretization errors (#41061)
+  - don't error for type mismatch on preferences (#41138)
+  - don't emit spurious debug output (#41218)
+- Improve the error message for deprecated preferences (#41075)
+- Fix MSVC preview version breaking clingo build on Windows (#41185)
+- Fix multi-word aliases (#41126)
+- Add a warning for unconfigured compiler (#41213)
+- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
+- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
+- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
+
+## Package updates
+- root: add a webgui patch to address security issue (#41404)
+- BerkeleyGW: update source urls (#38218)
+
 # v0.21.0 (2023-11-11)

 `v0.21.0` is a major feature release.
@@ -88,7 +88,7 @@ Resources:
   [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
 * [**Github Discussions**](https://github.com/spack/spack/discussions):
   for Q&A and discussions. Note the pinned discussions for announcements.
-* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
+* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
   only for announcements. Please use other venues for discussions.
@@ -15,7 +15,7 @@ concretizer:
   # as possible, rather than building. If `false`, we'll always give you a fresh
   # concretization. If `dependencies`, we'll only reuse dependencies but
   # give you a fresh concretization for your root specs.
-  reuse: dependencies
+  reuse: true
   # Options that tune which targets are considered for concretization. The
   # concretization process is very sensitive to the number of targets, and the time
   # needed to reach a solution increases noticeably with the number of targets
@@ -42,3 +42,8 @@ concretizer:
   # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
   # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
   strategy: minimal
+  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Specified as a key: [list] where the key is the OS that is being targeted, and the list contains the OS's
+  # it can reuse. Note this is a directional compatibility, so mutual compatibility between two OS's
+  # requires two entries, i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
+  os_compatible: {}
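For illustration, a sketch of how this new option might be filled in, reusing the macOS names the comment above already mentions (the exact pairing is an assumption for the example, not part of the diff):

concretizer:
  os_compatible:
    sonoma: [monterey]
    monterey: [sonoma]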
@@ -101,6 +101,12 @@ config:
   verify_ssl: true

+  # This is where custom certs for proxy/firewall are stored.
+  # It can be a path or environment variable. To match ssl env configuration
+  # the default is the environment variable SSL_CERT_FILE
+  ssl_certs: $SSL_CERT_FILE
+
   # Suppress gpg warnings from binary package verification
   # Only suppresses warnings, gpg failure will still fail the install
   # Potential rationale to set True: users have already explicitly trusted the
etc/spack/defaults/cray/packages.yaml (19 changes, new file)
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
@@ -19,7 +19,6 @@ packages:
     - apple-clang
     - clang
     - gcc
-    - intel
     providers:
       elf: [libelf]
       fuse: [macfuse]
etc/spack/defaults/linux/packages.yaml (19 changes, new file)
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
@@ -15,15 +15,17 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
+      armci: [armcimpi]
       blas: [openblas, amdblis]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
+      fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
       glu: [mesa-glu, openglu]
@@ -34,7 +36,10 @@ packages:
       java: [openjdk, jdk, ibm-java]
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
+      libc: [glibc, musl]
+      libgfortran: [ gcc-runtime ]
       libglx: [mesa+glx, mesa18+glx]
+      libifcore: [ intel-oneapi-runtime ]
       libllvm: [llvm]
       libosmesa: [mesa+osmesa, mesa18+osmesa]
       lua-lang: [lua, lua-luajit-openresty, lua-luajit]
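Since these are only defaults, a user can override any of them in their own packages.yaml; a minimal sketch (the preference order shown is hypothetical):

packages:
  all:
    providers:
      blas: [amdblis, openblas]   # prefer amdblis over the default openblas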
lib/spack/docs/_templates/layout.html (12 changes, new file, vendored)
@@ -0,0 +1,12 @@
+{% extends "!layout.html" %}
+
+{%- block extrahead %}
+  <!-- Google tag (gtag.js) -->
+  <script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
+  <script>
+    window.dataLayer = window.dataLayer || [];
+    function gtag(){dataLayer.push(arguments);}
+    gtag('js', new Date());
+    gtag('config', 'G-S0PQ7WV75K');
+  </script>
+{% endblock %}
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
 installed them. As you've seen, Spack packages are installed into long
 paths with hashes, and you need a way to get them into your path. The
 easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
-described in the next section.
+described in this section.

 Some more advanced ways to use Spack packages include:
@@ -959,7 +959,86 @@ use ``spack find --loaded``.
 You can also use ``spack load --list`` to get the same output, but it
 does not have the full set of query options that ``spack find`` offers.

-We'll learn more about Spack's spec syntax in the next section.
+We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
+
+.. _extensions:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python packages and virtual environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack can install a large number of Python packages. Their names are
+typically prefixed with ``py-``. Installing and using them is no
+different from any other package:
+
+.. code-block:: console
+
+   $ spack install py-numpy
+   $ spack load py-numpy
+   $ python3
+   >>> import numpy
+
+The ``spack load`` command sets the ``PATH`` variable so that the right Python
+executable is used, and makes sure that ``numpy`` and its dependencies can be
+located in the ``PYTHONPATH``.
+
+Spack is different from other Python package managers in that it installs
+every package into its *own* prefix. This is in contrast to ``pip``, which
+installs all packages into the same prefix, be it in a virtual environment
+or not.
+
+For many users, **virtual environments** are more convenient than repeated
+``spack load`` commands, particularly when working with multiple Python
+packages. Fortunately Spack supports environments itself, which together
+with a view are no different from Python virtual environments.
+
+The recommended way of working with Python extensions such as ``py-numpy``
+is through :ref:`Environments <environments>`. The following example creates
+a Spack environment with ``numpy`` in the current working directory. It also
+puts a filesystem view in ``./view``, which is a more traditional combined
+prefix for all packages in the environment.
+
+.. code-block:: console
+
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install
+
+Now you can activate the environment and start using the packages:
+
+.. code-block:: console
+
+   $ spack env activate .
+   $ python3
+   >>> import numpy
+
+The environment view is also a virtual environment, which is useful if you are
+sharing the environment with others who are unfamiliar with Spack. They can
+either use the Python executable directly:
+
+.. code-block:: console
+
+   $ ./view/bin/python3
+   >>> import numpy
+
+or use the activation script:
+
+.. code-block:: console
+
+   $ source ./view/bin/activate
+   $ python3
+   >>> import numpy
+
+In general, there should not be much difference between ``spack env activate``
+and using the virtual environment. The main advantage of ``spack env activate``
+is that it knows about more packages than just Python packages, and it may set
+additional runtime variables that are not covered by the virtual environment
+activation script.
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
 .. _sec-specs:
@@ -1119,6 +1198,9 @@ and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
 ``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
 includes any version with major version ``3``.

+Versions are ordered lexicographically by their components. For more details
+on the order, see :ref:`the packaging guide <version-comparison>`.
+
 Notice that you can distinguish between the specific version ``@=3.2`` and
 the range ``@3.2``. This is useful for packages that follow a versioning
 scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
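To make the range rules above concrete, a brief sketch (the package and version numbers are hypothetical, not taken from the diff):

.. code-block:: console

   $ spack install zlib@1.2:1.3   # any version in the closed range 1.2 to 1.3
   $ spack install zlib@1.2:      # version 1.2 or any newer version
   $ spack install zlib@=1.2.13   # exactly version 1.2.13, not the range @1.2.13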
@@ -1702,165 +1784,6 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.

-.. _extensions:
-
----------------------------
-Extensions & Python support
----------------------------
-
-Spack's installation model assumes that each package will live in its
-own install prefix. However, certain packages are typically installed
-*within* the directory hierarchy of other packages. For example,
-`Python <https://www.python.org>`_ packages are typically installed in the
-``$prefix/lib/python-2.7/site-packages`` directory.
-
-In Spack, installation prefixes are immutable, so this type of installation
-is not directly supported. However, it is possible to create views that
-allow you to merge install prefixes of multiple packages into a single new prefix.
-Views are a convenient way to get a more traditional filesystem structure.
-Using *extensions*, you can ensure that Python packages always share the
-same prefix in the view as Python itself. Suppose you have
-Python installed like so:
-
-.. code-block:: console
-
-   $ spack find python
-   ==> 1 installed packages.
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   python@2.7.8
-
-.. _cmd-spack-extensions:
-
-^^^^^^^^^^^^^^^^^^^^
-``spack extensions``
-^^^^^^^^^^^^^^^^^^^^
-
-You can find extensions for your Python installation like this:
-
-.. code-block:: console
-
-   $ spack extensions python
-   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
-   ==> 36 extensions:
-   geos          py-ipython     py-pexpect    py-pyside            py-sip
-   py-basemap    py-libxml2     py-pil        py-pytz              py-six
-   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
-   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
-   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
-   py-epydoc     py-mx          py-pylint     py-scipy
-   py-gnuplot    py-nose        py-pyparsing  py-setuptools
-   py-h5py       py-numpy       py-pyqt       py-shiboken
-
-   ==> 12 installed:
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
-   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
-   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
-   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0
-
-The extensions are a subset of what's returned by ``spack list``, and
-they are packages like any other. They are installed into their own
-prefixes, and you can see this with ``spack find --paths``:
-
-.. code-block:: console
-
-   $ spack find --paths py-numpy
-   ==> 1 installed packages.
-   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
-   py-numpy@1.9.1  ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
-
-However, even though this package is installed, you cannot use it
-directly when you run ``python``:
-
-.. code-block:: console
-
-   $ spack load python
-   $ python
-   Python 2.7.8 (default, Feb 17 2015, 01:35:25)
-   [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
-   Type "help", "copyright", "credits" or "license" for more information.
-   >>> import numpy
-   Traceback (most recent call last):
-     File "<stdin>", line 1, in <module>
-   ImportError: No module named numpy
-   >>>
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Using Extensions in Environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The recommended way of working with extensions such as ``py-numpy``
-above is through :ref:`Environments <environments>`. For example,
-the following creates an environment in the current working directory
-with a filesystem view in the ``./view`` directory:
-
-.. code-block:: console
-
-   $ spack env create --with-view view --dir .
-   $ spack -e . add py-numpy
-   $ spack -e . concretize
-   $ spack -e . install
-
-We recommend environments for two reasons. Firstly, environments
-can be activated (requires :ref:`shell-support`):
-
-.. code-block:: console
-
-   $ spack env activate .
-
-which sets all the right environment variables such as ``PATH`` and
-``PYTHONPATH``. This ensures that
-
-.. code-block:: console
-
-   $ python
-   >>> import numpy
-
-works. Secondly, even without shell support, the view ensures
-that Python can locate its extensions:
-
-.. code-block:: console
-
-   $ ./view/bin/python
-   >>> import numpy
-
-See :ref:`environments` for a more in-depth description of Spack
-environments and customizations to views.
-
-^^^^^^^^^^^^^^^^^^^^
-Using ``spack load``
-^^^^^^^^^^^^^^^^^^^^
-
-A more traditional way of using Spack and extensions is ``spack load``
-(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
-in your current shell, and Python itself will be available in the ``PATH``:
-
-.. code-block:: console
-
-   $ spack load py-numpy
-   $ python
-   >>> import numpy
-
-The loaded packages can be checked using ``spack find --loaded``
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Loading Extensions via Modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Apart from ``spack env activate`` and ``spack load``, you can load numpy
-through your environment modules (using ``environment-modules`` or
-``lmod``). This will also add the extension to the ``PYTHONPATH`` in
-your current shell.
-
-.. code-block:: console
-
-   $ module load <name of numpy module>
-
-If you do not know the name of the specific numpy module you wish to
-load, you can use the ``spack module tcl|lmod loads`` command to get
-the name of the module from the Spack spec.
-
 -----------------------
 Filesystem requirements
 -----------------------
@@ -220,6 +220,40 @@ section of the configuration:

 .. _binary_caches_oci:

+---------------------------------
+Automatic push to a build cache
+---------------------------------
+
+Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting the autopush flag when adding a mirror:
+
+.. code-block:: console
+
+   $ spack mirror add --autopush <name> <url or path>
+
+Or the autopush flag can be set for an existing mirror:
+
+.. code-block:: console
+
+   $ spack mirror set --autopush <name>      # enable automatic push for an existing mirror
+   $ spack mirror set --no-autopush <name>   # disable automatic push for an existing mirror
+
+Then after installing a package it is automatically pushed to all mirrors with ``autopush: true``. The command
+
+.. code-block:: console
+
+   $ spack install <package>
+
+will have the same effect as
+
+.. code-block:: console
+
+   $ spack install <package>
+   $ spack buildcache push <cache> <package>  # for all caches with autopush: true
+
+.. note::
+
+   Packages are automatically pushed to a build cache only if they are built from source.
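The flag ends up in the mirror configuration; a hedged sketch of what the resulting ``mirrors.yaml`` entry might look like (the mirror name and URL are hypothetical):

.. code-block:: yaml

   mirrors:
     my-cache:
       url: file:///path/to/buildcache
       autopush: true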
 -----------------------------------------
 OCI / Docker V2 registries as build cache
 -----------------------------------------
@@ -87,7 +87,7 @@ You can check what is installed in the bootstrapping store at any time using:

 .. code-block:: console

-   % spack find -b
+   % spack -b find
    ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
    ==> 11 installed packages
    -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
@@ -101,7 +101,7 @@ In case it is needed you can remove all the software in the current bootstrapping
    % spack clean -b
    ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

-   % spack find -b
+   % spack -b find
    ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
    ==> 0 installed packages

@@ -175,4 +175,4 @@ bootstrapping.

 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
 commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -21,23 +21,86 @@ is the following:
 Reuse already installed packages
 --------------------------------

-The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
-whether it will do a "fresh" installation and prefer the latest settings from
-``package.py`` files and ``packages.yaml`` (``false``).
-You can use:
+The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
+attribute can either be a single value, or an object for more complex configurations.
+
+In the former case ("single value") it allows Spack to:
+
+1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
+2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
+3. Disregard reusing installed packages and buildcaches, when ``false``
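As a minimal sketch of the single-value form in ``concretizer.yaml``, using one of the three values listed above:

.. code-block:: yaml

   concretizer:
     reuse: dependencies   # reuse only for dependencies of the root specs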
+
+In case a finer control over which specs are reused is needed, then the value of this attribute can be
+an object, with the following keys:
+
+1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
+2. ``from``: list of sources from which reused specs are taken
+
+Each source in ``from`` is itself an object:
+
+.. list-table:: Attributes for a source of reusable specs
+   :header-rows: 1
+
+   * - Attribute name
+     - Description
+   * - type (mandatory, string)
+     - Can be ``local``, ``buildcache``, or ``external``
+   * - include (optional, list of specs)
+     - If present, reusable specs must match at least one of the constraints in the list
+   * - exclude (optional, list of specs)
+     - If present, reusable specs must not match any of the constraints in the list.
+
+For instance, the following configuration:
+
+.. code-block:: yaml
+
+   concretizer:
+     reuse:
+       roots: true
+       from:
+       - type: local
+         include:
+         - "%gcc"
+         - "%clang"
+
+tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang``, that are installed
+in the local store. Any spec from remote buildcaches is disregarded.
+
+To reduce the boilerplate in configuration files, default values for the ``include`` and
+``exclude`` options can be pushed up one level:
+
+.. code-block:: yaml
+
+   concretizer:
+     reuse:
+       roots: true
+       include:
+       - "%gcc"
+       from:
+       - type: local
+       - type: buildcache
+       - type: local
+         include:
+         - "foo %oneapi"
+
+In the example above we reuse all specs compiled with ``gcc`` from the local store
+and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
+specs overrides the default ``include`` attribute.
+
+For one-off concretizations, there are command-line arguments for each of the simple "single value"
+configurations. This means a user can run:
+
 .. code-block:: console

    % spack install --reuse <spec>

-to enable reuse for a single installation, and you can use:
+to enable reuse for a single installation, or:

 .. code-block:: console

    spack install --fresh <spec>

 to do a fresh install if ``reuse`` is enabled by default.
-``reuse: dependencies`` is the default.

 .. seealso::
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

 .. code-block:: python

-   generator = "Ninja"
+   generator("ninja")

 ``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
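For context, a hedged sketch of the new directive inside a hypothetical ``package.py`` (the package name, URL, and checksum below are made up for illustration):

.. code-block:: python

   from spack.package import *


   class Demo(CMakePackage):
       """An illustrative CMake-based package (hypothetical)."""

       homepage = "https://example.com/demo"
       url = "https://example.com/demo-1.0.tar.gz"

       version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

       # Build with Ninja instead of the default "Unix Makefiles" generator
       generator("ninja")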
@@ -173,6 +173,72 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding
 ]

+^^^^^^^
+Testing
+^^^^^^^
+
+``PerlPackage`` provides a simple stand-alone test of the successfully
+installed package to confirm that installed perl module(s) can be used.
+These tests can be performed any time after the installation using
+``spack -v test run``. (For more information on the command, see
+:ref:`cmd-spack-test-run`.)
+
+The base class automatically detects perl modules based on the presence
+of ``*.pm`` files under the package's library directory. For example,
+the files under ``perl-bignum``'s perl library are:
+
+.. code-block:: console
+
+   $ find . -name "*.pm"
+   ./bigfloat.pm
+   ./bigrat.pm
+   ./Math/BigFloat/Trace.pm
+   ./Math/BigInt/Trace.pm
+   ./Math/BigRat/Trace.pm
+   ./bigint.pm
+   ./bignum.pm
+
+which results in the package having the ``use_modules`` property containing:
+
+.. code-block:: python
+
+   use_modules = [
+       "bigfloat",
+       "bigrat",
+       "Math::BigFloat::Trace",
+       "Math::BigInt::Trace",
+       "Math::BigRat::Trace",
+       "bigint",
+       "bignum",
+   ]
+
+.. note::
+
+   This list can often be used to catch missing dependencies.
+
+If the list is somehow wrong, you can provide the names of the modules
+yourself by overriding ``use_modules`` like so:
+
+.. code-block:: python
+
+   use_modules = ["bigfloat", "bigrat", "bigint", "bignum"]
+
+If you only want a subset of the automatically detected modules to be
+tested, you could instead define the ``skip_modules`` property on the
+package. So, instead of overriding ``use_modules`` as shown above, you
+could define the following:
+
+.. code-block:: python
+
+   skip_modules = [
+       "Math::BigFloat::Trace",
+       "Math::BigInt::Trace",
+       "Math::BigRat::Trace",
+   ]
+
+for the same use tests.
+
 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack
 ^^^^^^^^^^^^^^^^^^^^^
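As a usage sketch, the stand-alone test described above could be invoked on an installed ``perl-bignum`` like so (output elided):

.. code-block:: console

   $ spack install perl-bignum
   $ spack -v test run perl-bignum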
@@ -718,23 +718,45 @@ command-line tool, or C/C++/Fortran program with optional Python
 modules? The former should be prepended with ``py-``, while the
 latter should not.

-""""""""""""""""""""""
-extends vs. depends_on
-""""""""""""""""""""""
+""""""""""""""""""""""""""""""
+``extends`` vs. ``depends_on``
+""""""""""""""""""""""""""""""

-This is very similar to the naming dilemma above, with a slight twist.
 As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
 ``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
 that the extension and extendee share the same prefix in views.
 This allows the user to import a Python module without
 having to add that module to ``PYTHONPATH``.

-When deciding between ``extends`` and ``depends_on``, the best rule of
-thumb is to check the installation prefix. If Python libraries are
-installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
-should use ``extends``. If Python libraries are installed elsewhere
-or the only files that get installed reside in ``<prefix>/bin``, then
-don't use ``extends``.
+Additionally, ``extends("python")`` adds a dependency on the package
+``python-venv``. This improves isolation from the system, whether
+it's during the build or at runtime: user and system site packages
+cannot accidentally be used by any package that ``extends("python")``.
+
+As a rule of thumb: if a package does not install any Python modules
+of its own, and merely puts a Python script in the ``bin`` directory,
+then there is no need for ``extends``. If the package installs modules
+in the ``site-packages`` directory, it requires ``extends``.
+
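A hedged sketch of that rule of thumb in package form (both package classes below are hypothetical):

.. code-block:: python

   from spack.package import *


   class PyExamplelib(PythonPackage):
       """Installs modules into site-packages, so it must extend python."""

       # PythonPackage already implies this; shown here for clarity
       extends("python")


   class ExampleTool(Package):
       """Only installs a script into bin/, so a plain dependency suffices."""

       depends_on("python", type=("build", "run"))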
+"""""""""""""""""""""""""""""""""""""
+Executing ``python`` during the build
+"""""""""""""""""""""""""""""""""""""
+
+Whenever you need to execute a Python command or pass the path of the
+Python interpreter to the build system, it is best to use the global
+variable ``python`` directly. For example:
+
+.. code-block:: python
+
+   @run_before("install")
+   def recythonize(self):
+       python("setup.py", "clean")  # use the `python` global
+
+As mentioned in the previous section, ``extends("python")`` adds an
+automatic dependency on ``python-venv``, which is a virtual environment
+that guarantees build isolation. The ``python`` global always refers to
+the correct Python interpreter, whether the package uses ``extends("python")``
+or ``depends_on("python")``.
+
 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack
 ^^^^^^^^^^^^^^^^^^^^^
@@ -145,6 +145,25 @@ hosts when making ``ssl`` connections. Set to ``false`` to disable, and
 tools like ``curl`` will use their ``--insecure`` options. Disabling
 this can expose you to attacks. Use at your own risk.

+--------------------
+``ssl_certs``
+--------------------
+
+Path to custom certificates for SSL verification. The value can be a
+filesystem path, or an environment variable that expands to an absolute file path.
+The default value is set to the environment variable ``SSL_CERT_FILE``
+to use the same syntax used by many other applications that automatically
+detect custom certificates.
+When ``url_fetch_method:curl``, ``config:ssl_certs`` should resolve to
+a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
+in the subprocess calling ``curl``.
+If ``url_fetch_method:urllib``, then both files and directories are supported, i.e.
+``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
+will work.
+In all cases the expanded path must be absolute for Spack to use the certificates.
+Certificates relative to an environment can be created by prepending the path variable
+with the Spack configuration variable ``$env``.
+
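A minimal sketch of the setting in ``config.yaml`` (the certificate path is hypothetical):

.. code-block:: yaml

   config:
     ssl_certs: /etc/ssl/certs/ca-certificates.crt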
 --------------------
 ``checksum``
 --------------------
@@ -73,9 +73,12 @@ are six configuration scopes. From lowest to highest:
    Spack instance per project) or for site-wide settings on a multi-user
    machine (e.g., for a common Spack instance).

+#. **plugin**: Read from a Python project's entry points. Settings here affect
+   all instances of Spack running with the same Python installation. This scope
+   takes higher precedence than site, system, and default scopes.
+
 #. **user**: Stored in the home directory: ``~/.spack/``. These settings
    affect all instances of Spack and take higher precedence than site,
-   system, or defaults scopes.
+   system, plugin, or defaults scopes.

 #. **custom**: Stored in a custom directory specified by ``--config-scope``.
    If multiple scopes are listed on the command line, they are ordered
@@ -196,6 +199,45 @@ with MPICH. You can create different configuration scopes for use with
       mpi: [mpich]

+.. _plugin-scopes:
+
+^^^^^^^^^^^^^
+Plugin scopes
+^^^^^^^^^^^^^
+
+.. note::
+   Python version >= 3.8 is required to enable plugin configuration.
+
+Spack can be made aware of configuration scopes that are installed as part of a Python package. To do so, register a function that returns the scope's path to the ``"spack.config"`` entry point. Consider the Python package ``my_package`` that includes Spack configurations:
+
+.. code-block:: console
+
+   my-package/
+   ├── src
+   │   └── my_package
+   │       ├── __init__.py
+   │       └── spack/
+   │           └── config.yaml
+   └── pyproject.toml
+
+Adding the following to ``my_package``'s ``pyproject.toml`` will make ``my_package``'s ``spack/`` configurations visible to Spack when ``my_package`` is installed:
+
+.. code-block:: toml
+
+   [project.entry_points."spack.config"]
+   my_package = "my_package:get_config_path"
+
+The function ``my_package.get_config_path`` in ``my_package/__init__.py`` might look like
+
+.. code-block:: python
+
+   import importlib.resources
+
+
+   def get_config_path():
+       dirname = importlib.resources.files("my_package").joinpath("spack")
+       if dirname.exists():
+           return str(dirname)
+
 .. _platform-scopes:

 ------------------------
@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
-   * - Ubuntu 18.04
-     - ``ubuntu:18.04``
-     - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
    * - Ubuntu 22.04
      - ``ubuntu:22.04``
      - ``spack/ubuntu-jammy``
+   * - Ubuntu 24.04
+     - ``ubuntu:24.04``
+     - ``spack/ubuntu-noble``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
@@ -227,12 +227,12 @@ The OS that are currently supported are summarized in the table below:
    * - Rocky Linux 9
      - ``rockylinux:9``
      - ``spack/rockylinux9``
-   * - Fedora Linux 37
-     - ``fedora:37``
-     - ``spack/fedora37``
-   * - Fedora Linux 38
-     - ``fedora:38``
-     - ``spack/fedora38``
+   * - Fedora Linux 39
+     - ``fedora:39``
+     - ``spack/fedora39``
+   * - Fedora Linux 40
+     - ``fedora:40``
+     - ``spack/fedora40``
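For context, a hedged sketch of selecting the newly added image in a ``spack.yaml`` used with ``spack containerize`` (the spec list is hypothetical, and the schema is an assumption based on the surrounding docs):

.. code-block:: yaml

   spack:
     specs: [zlib]
     container:
       format: docker
       images:
         os: ubuntu:24.04
         spack: develop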
@@ -552,11 +552,11 @@ With either interpreter you can run a single command:

 .. code-block:: console

-   $ spack python -c 'import distro; distro.linux_distribution()'
-   ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   ...

-   $ spack python -i ipython -c 'import distro; distro.linux_distribution()'
-   Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
+   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   Out[1]: ...

 or a file:
@@ -1071,9 +1071,9 @@ Announcing a release

 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:

-#. Announce the release on Twitter.
+#. Announce the release on X.

    * Compose the tweet on the ``@spackpm`` account per the
      ``spack-twitter`` slack channel.
@@ -142,12 +142,8 @@ user's prompt to begin with the environment name in brackets.

    $ spack env activate -p myenv
    [myenv] $ ...

-The ``activate`` command can also be used to create a new environment, if it is
-not already defined, by adding the ``--create`` flag. Managed and anonymous
-environments (anonymous environments are explained in the next section)
-can both be created using the same flags that ``spack env create`` accepts.
-If an environment already exists then spack will simply activate it and ignore the
-create-specific flags.
+The ``activate`` command can also be used to create a new environment if it does not already
+exist.

 .. code-block:: console
@@ -176,21 +172,36 @@ environment will remove the view from the user environment.
 Anonymous Environments
 ^^^^^^^^^^^^^^^^^^^^^^

-Any directory can be treated as an environment if it contains a file
-``spack.yaml``. To load an anonymous environment, use:
+Apart from managed environments, Spack also supports anonymous environments.
+
+Anonymous environments can be placed in any directory of choice.
+
+.. note::
+
+   When uninstalling packages, Spack asks the user to confirm the removal of packages
+   that are still used in a managed environment. This is not the case for anonymous
+   environments.
+
+To create an anonymous environment, use one of the following commands:

 .. code-block:: console

-   $ spack env activate -d /path/to/directory
+   $ spack env create --dir my_env
+   $ spack env create ./my_env

-Anonymous specs can be created in place using the command:
+As a shorthand, you can also create an anonymous environment upon activation if it does not
+already exist:

 .. code-block:: console

-   $ spack env create -d .
+   $ spack env activate --create ./my_env

+For convenience, Spack can also place an anonymous environment in a temporary directory for you:
+
+.. code-block:: console
+
+   $ spack env activate --temp
+
-In this case Spack simply creates a ``spack.yaml`` file in the requested
-directory.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Environment Sensitive Commands
@@ -449,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the
 user; and can be included in ``.bashrc`` files, etc. The ``loads``
 file may also be copied out of the environment, renamed, etc.

+.. _environment_include_concrete:
+
+------------------------------
+Included Concrete Environments
+------------------------------
+
+Spack environments can create an environment based off of information in already
+established environments. You can think of it as a combination of existing
+environments. It will gather information from the existing environments'
+``spack.lock`` files and use that during the creation of this included concrete
+environment. When an included concrete environment is created it will generate
+a ``spack.lock`` file for the newly created environment.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Creating included environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To create a combined concrete environment, you must have at least one existing
+concrete environment. You will use the command ``spack env create`` with the
+argument ``--include-concrete`` followed by the name or path of the environment
+you'd like to include. Here is an example of how to create a combined environment
+from the command line.
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+You can also include an environment directly in the ``spack.yaml`` file. It
+involves adding the ``include_concrete`` heading in the yaml followed by the
+absolute path to the independent environments.
+
+.. code-block:: yaml
+
+   spack:
+     specs: []
+     concretizer:
+       unify: true
+     include_concrete:
+     - /absolute/path/to/environment1
+     - /absolute/path/to/environment2
+
+Once the ``spack.yaml`` has been updated you must concretize the environment to
+get the concrete specs from the included environments.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Updating an included environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If changes were made to the base environment and you want that reflected in the
+included environment, you will need to reconcretize both the base environment and the
+included environment for the change to be implemented. For example:
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+Here we see that ``included_env`` has access to the python package through
+the ``myenv`` environment. But if we were to add another spec to ``myenv``,
+``included_env`` will not be able to access the new information.
+
+.. code-block:: console
+
+   $ spack -e myenv add perl
+   $ spack -e myenv concretize
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   perl  python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+It isn't until you run the ``spack concretize`` command that the combined
+environment will get the updated information from the reconcretized base environment.
+
+.. code-block:: console
+
+   $ spack -e included_env concretize
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   perl  python
+
+   ==> 0 installed packages
+
 .. _environment-configuration:

 ------------------------
@@ -800,6 +930,7 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

+
 ----------------
 Filesystem Views
 ----------------
@@ -952,6 +1083,17 @@ function, as shown in the example below:
       ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
       all: "{name}-{version}/{compiler.name}-{compiler.version}"

+Projections also permit environment and spack configuration variable
+expansions, as shown below:
+
+.. code-block:: yaml
+
+   projections:
+     all: "{name}-{version}/{compiler.name}-{compiler.version}/$date/$SYSTEM_ENV_VARIABLE"
+
+where ``$date`` is the spack configuration variable that expands to the
+current date in ``YYYY-MM-DD`` format and ``$SYSTEM_ENV_VARIABLE`` is an
+environment variable defined in the shell.
+
 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
 be the first non-``all`` entry that the spec satisfies, or ``all`` if
@@ -1022,7 +1164,7 @@ other targets to depend on the environment installation.

 A typical workflow is as follows:

-.. code:: console
+.. code-block:: console

    spack env create -d .
    spack -e . add perl
@@ -1115,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to
 dependencies. Below we show a use case where a spec is installed with verbose
 output (``spack install --verbose``) while its dependencies are installed silently:

-.. code:: console
+.. code-block:: console

    $ spack env depfile -o Makefile
@@ -1137,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
 variable. Assuming we have an active and concrete environment, we generate the
 associated ``Makefile`` with a prefix ``example``:

-.. code:: console
+.. code-block:: console

    $ spack env depfile -o env.mk --make-prefix example
@@ -111,3 +111,39 @@ The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``

 (5 durations < 0.005s hidden. Use -vv to show these durations.)
 =========================================== 5 passed in 5.06s ============================================
+
+---------------------------------------
+Registering Extensions via Entry Points
+---------------------------------------
+
+.. note::
+
+   Python version >= 3.8 is required to register extensions via entry points.
+
+Spack can be made aware of extensions that are installed as part of a python
+package. To do so, register a function that returns the extension path, or
+paths, to the ``"spack.extensions"`` entry point. Consider the Python package
+``my_package`` that includes a Spack extension:
+
+.. code-block:: console
+
+   my-package/
+   ├── src
+   │   ├── my_package
+   │   │   └── __init__.py
+   │   └── spack-scripting/  # the spack extension
+   └── pyproject.toml
+
+Adding the following to ``my_package``'s ``pyproject.toml`` will make the
+``spack-scripting`` extension visible to Spack when ``my_package`` is installed:
+
+.. code-block:: toml
+
+   [project.entry-points."spack.extensions"]
+   my_package = "my_package:get_extension_path"
+
+The function ``my_package.get_extension_path`` in ``my_package/__init__.py``
+might look like:
+
+.. code-block:: python
+
+   import importlib.resources
+
+   def get_extension_path():
+       dirname = importlib.resources.files("my_package").joinpath("spack-scripting")
+       if dirname.exists():
+           return str(dirname)
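On the consuming side, entry points registered under the ``"spack.extensions"`` group can be enumerated with the standard library. The following is a minimal sketch of that discovery pattern, not Spack's actual loader; it assumes Python 3.10+ for the ``group=`` keyword of ``entry_points``:

.. code-block:: python

   import importlib.metadata

   def find_extension_paths():
       """Collect extension paths advertised by installed packages (illustrative)."""
       paths = []
       for entry_point in importlib.metadata.entry_points(group="spack.extensions"):
           get_path = entry_point.load()  # e.g. my_package.get_extension_path
           path = get_path()
           if path:
               paths.append(path)
       return paths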
@@ -250,9 +250,10 @@ Compiler configuration

 Spack has the ability to build packages with multiple compilers and
 compiler versions. Compilers can be made available to Spack by
-specifying them manually in ``compilers.yaml``, or automatically by
-running ``spack compiler find``, but for convenience Spack will
-automatically detect compilers the first time it needs them.
+specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
+or automatically by running ``spack compiler find``, but for
+convenience Spack will automatically detect compilers the first time
+it needs them.

 .. _cmd-spack-compilers:
@@ -457,6 +458,54 @@ specification. The operations available to modify the environment are ``set``, ``unset``,
       prepend_path: # Similar for append|remove_path
         LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

+.. note::
+
+   Spack is in the process of moving compilers from a separate
+   attribute to be handled like all other packages. As part of this
+   process, the ``compilers.yaml`` section will eventually be replaced
+   by configuration in the ``packages.yaml`` section. This new
+   configuration is now available, although it is not yet the default
+   behavior.
+
+Compilers can also be configured as external packages in the
+``packages.yaml`` config file. Any external package for a compiler
+(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
+assuming the paths to the compiler executables are determinable from
+the prefix.
+
+If the paths to the compiler executable are not determinable from the
+prefix, you can add them to the ``extra_attributes`` field. Similarly,
+all other fields from the compilers config can be added to the
+``extra_attributes`` field for an external representing a compiler.
+
+Note that the format for the ``paths`` field in the
+``extra_attributes`` section is different than in the ``compilers``
+config. For compilers configured as external packages, the section is
+named ``compilers`` and the dictionary maps language names (``c``,
+``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
+``fc``, and ``f77``.
+
+.. code-block:: yaml
+
+   packages:
+     gcc:
+       external:
+       - spec: gcc@12.2.0 arch=linux-rhel8-skylake
+         prefix: /usr
+         extra_attributes:
+           environment:
+             set:
+               GCC_ROOT: /usr
+     llvm:
+       external:
+       - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
+         prefix: /usr
+         extra_attributes:
+           compilers:
+             c: /usr/bin/clang-with-suffix
+             cxx: /usr/bin/clang++-with-extra-info
+             fortran: /usr/bin/gfortran
+           extra_rpaths:
+           - /usr/lib/llvm/
+
 ^^^^^^^^^^^^^^^^^^^^^^^
 Build Your Own Compiler
@@ -1529,6 +1578,8 @@ Microsoft Visual Studio
 """""""""""""""""""""""

 Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
+Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
+Visual Studio installation, as it is required to build many packages from source.

 We require several specific components to be included in the Visual Studio installation.
 One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1536,6 +1587,7 @@ depending on installation type (Professional, Build Tools, etc.) The other required
 "C++ CMake tools for Windows," which can be selected from among the optional packages.
 This provides CMake and Ninja for use during Spack configuration.

+
 If you already have Visual Studio installed, you can make sure these components are installed by
 rerunning the installer. Next to your installation, select "Modify" and look at the
 "Installation details" pane on the right.
@@ -273,9 +273,21 @@ builtin support through the ``depends_on`` function, the latter simply uses a ``load``
 statement. Both module systems (at least in newer versions) do reference counting, so that if a
 module is loaded by two different modules, it will only be unloaded after the others are.

-The ``autoload`` key accepts the values ``none``, ``direct``, and ``all``. To disable it, use
-``none``, and to enable, it's best to stick to ``direct``, which only autoloads the direct link and
-run type dependencies, relying on recursive autoloading to load the rest.
+The ``autoload`` key accepts the values:
+
+* ``none``: no autoloading
+* ``run``: autoload direct *run* type dependencies
+* ``direct``: autoload direct *link and run* type dependencies
+* ``all``: autoload all dependencies
+
+In the case of ``run`` and ``direct``, a ``module load`` triggers a recursive load.
+
+The ``direct`` option is the most correct: there are cases where pure link dependencies need to set
+variables for themselves, or need to have variables of their own dependencies set.
+
+In practice however, ``run`` is often sufficient, and may make ``module load`` snappier.
+
+The ``all`` option is discouraged and seldom used.

 A common complaint about autoloading is the large number of modules that are visible to the user.
 Spack has a solution for this as well: ``hide_implicits: true``. This ensures that only those
@@ -297,11 +309,11 @@ Environment Modules requires version 4.7 or higher.
    tcl:
      hide_implicits: true
      all:
-      autoload: direct
+      autoload: direct  # or `run`
    lmod:
      hide_implicits: true
      all:
-      autoload: direct
+      autoload: direct  # or `run`

 .. _anonymous_specs:
@@ -893,26 +893,50 @@ as an option to the ``version()`` directive. Example situations would be a
 "snapshot"-like Version Control System (VCS) tag, a VCS branch such as
 ``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.

+.. _version-comparison:

 ^^^^^^^^^^^^^^^^^^
 Version comparison
 ^^^^^^^^^^^^^^^^^^

+Spack imposes a generic total ordering on the set of versions,
+independently of the package they are associated with.
+
 Most Spack versions are numeric, a tuple of integers; for example,
-``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
-numeric versions.
+``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
+comparison is lexicographical on the numeric components:
+``1.2 < 1.2.1 < 1.2.2 < 1.10``.

-Some Spack versions involve slight extensions of numeric syntax; for
-example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
-always considered to be "newer" than letters. This is for consistency
-with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
+Spack also supports string components such as ``1.1.1a`` and
+``1.y.0``. String components are considered less than numeric
+components, so ``1.y.0 < 1.0``. This is for consistency with
+`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
+components do not have to be separated by dots or any other delimiter,
+so the contrived version ``1y0`` is identical to ``1.y.0``.

-Spack versions may also be arbitrary non-numeric strings, for example
-``develop``, ``master``, ``local``.
+Pre-release suffixes also contain string parts, but they are handled
+in a special way. For example, ``1.2.3alpha1`` is parsed as a pre-release
+of the version ``1.2.3``. This allows Spack to order it before the
+actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
+release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
+suffix not recognized as a pre-release is treated as an ordinary
+string component, so ``1.2 < 1.2-mysuffix``.

-The order on versions is defined as follows. A version string is split
-into a list of components based on delimiters such as ``.``, ``-`` etc.
-Lists are then ordered lexicographically, where components are ordered
-as follows:
+Finally, there are a few special string components that are considered
+"infinity versions". They include ``develop``, ``main``, ``master``,
+``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
+These are useful for specifying the most recent development version of
+a package (often a moving target like a git branch), without assigning
+a specific version number. Infinity versions are not automatically used
+when determining the latest version of a package unless explicitly
+required by another package or user.
+
+More formally, the order on versions is defined as follows. A version
+string is split into a list of components based on delimiters such as
+``.`` and ``-`` and string boundaries. The components are split into
+the **release** and a possible **pre-release** (if the last component
+is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
+The release components are ordered lexicographically, with comparison
+between different types of components as follows:

 #. The following special strings are considered larger than any other
    numeric or non-numeric version component, and satisfy the following
@@ -925,6 +949,9 @@ as follows:
 #. All other non-numeric components are less than numeric components,
    and are ordered alphabetically.

+Finally, if the release components are equal, the pre-release components
+are used to break the tie, in the obvious way.
+
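As a quick sanity check of these rules, the comparisons can be exercised directly against Spack's ``Version`` class. A minimal sketch, assuming Spack's libraries are importable (e.g. via ``spack python``):

.. code-block:: python

   # Exercising the ordering rules described above.
   from spack.version import Version

   assert Version("1.2") < Version("1.2.1") < Version("1.10")  # numeric, lexicographic
   assert Version("1.y.0") < Version("1.0")                    # strings sort below numbers
   assert Version("1.2.3alpha1") < Version("1.2.3")            # pre-release before release
   assert Version("1.2alpha1") < Version("1.2beta1") < Version("1.2rc1")
   assert Version("1.2") < Version("1.2-mysuffix")             # unrecognized suffix
   assert Version("1.2") < Version("develop")                  # infinity version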
The logic behind this sort order is two-fold:
|
The logic behind this sort order is two-fold:
|
||||||
|
|
||||||
#. Non-numeric versions are usually used for special cases while
|
#. Non-numeric versions are usually used for special cases while
|
||||||
@@ -6408,9 +6435,12 @@ the ``paths`` attribute:
           echo "Target: x86_64-pc-linux-gnu"
           echo "Thread model: posix"
           echo "InstalledDir: /usr/bin"
+     platforms: ["linux", "darwin"]
      results:
      - spec: 'llvm@3.9.1 +clang~lld~lldb'

+If the ``platforms`` attribute is present, tests are run only if the current
+host matches one of the listed platforms.
 Each test is performed by first creating a temporary directory structure as
 specified in the corresponding ``layout`` and by then running
 package detection and checking that the outcome matches the expected
@@ -6444,6 +6474,10 @@ package detection and checking that the outcome matches the expected
      - A spec that is expected from detection
      - Any valid spec
      - Yes
+   * - ``results:[0]:extra_attributes``
+     - Extra attributes expected on the associated Spec
+     - Nested dictionary with strings as keys, and regular expressions as leaf values
+     - No

 """""""""""""""""""""""""""""""
 Reuse tests from other packages
@@ -2,12 +2,12 @@ sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.0
+python-levenshtein==0.25.1
 docutils==0.20.1
-pygments==2.17.2
+pygments==2.18.0
 urllib3==2.2.1
-pytest==8.0.2
+pytest==8.2.0
 isort==5.13.2
-black==24.2.0
+black==24.4.2
 flake8==7.0.0
-mypy==1.8.0
+mypy==1.10.0
lib/spack/env/cc (vendored)

@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
 SPACK_FC_RPATH_ARG
 SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
-SPACK_SYSTEM_DIRS"
+SPACK_SYSTEM_DIRS
+SPACK_MANAGED_DIRS"

 # Optional parameters that aren't required to be set

@@ -173,22 +174,6 @@ preextend() {
     unset IFS
 }

-# system_dir PATH
-# test whether a path is a system directory
-system_dir() {
-    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
-    path="$1"
-    for sd in $SPACK_SYSTEM_DIRS; do
-        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
-            # success if path starts with a system prefix
-            unset IFS
-            return 0
-        fi
-    done
-    unset IFS
-    return 1  # fail if path starts no system prefix
-}
-
 # Fail with a clear message if the input contains any bell characters.
 if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
     die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -201,6 +186,18 @@ for param in $params; do
     fi
 done

+# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't want to loop over.
+# moving the eval inside the function would eval it on every call.
+eval "\
+path_order() {
+case \"\$1\" in
+    $SPACK_MANAGED_DIRS) return 0 ;;
+    $SPACK_SYSTEM_DIRS) return 2 ;;
+    /*) return 1 ;;
+esac
+}
+"
+
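The three return codes partition every directory argument into Spack-store paths (emitted first), other paths, and system paths (emitted last). The Python sketch below is illustrative only, not code from this change; ``managed_patterns`` and ``system_patterns`` are hypothetical stand-ins for the ``SPACK_MANAGED_DIRS`` and ``SPACK_SYSTEM_DIRS`` shell patterns, approximated here with ``fnmatch``:

.. code-block:: python

   from fnmatch import fnmatch

   def path_order(path, managed_patterns, system_patterns):
       """Classify a directory the way the shell function above does (illustrative)."""
       if any(fnmatch(path, pat) for pat in managed_patterns):
           return 0  # spack store: highest priority on the command line
       if any(fnmatch(path, pat) for pat in system_patterns):
           return 2  # system dirs: lowest priority
       return 1      # everything else sits in between

   # -I/-L/-rpath arguments are then emitted in order 0, 1, 2, so store
   # paths always shadow system paths.
   dirs = ["/usr/lib", "/home/me/lib", "/opt/spack/store/gcc-12/lib"]
   dirs.sort(key=lambda d: path_order(d, ["/opt/spack/store/*"], ["/usr", "/usr/lib"]))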
 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -248,7 +245,7 @@ case "$command" in
         lang_flags=C
         debug_flags="-g"
         ;;
-    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
+    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
         command="$SPACK_CXX"
         language="C++"
         comp="CXX"
@@ -420,11 +417,12 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            if system_dir "$1"; then
-                append return_system_rpath_dirs_list "$1"
-            else
-                append return_rpath_dirs_list "$1"
-            fi
+            path_order "$1"
+            case $? in
+                0) append return_spack_store_rpath_dirs_list "$1" ;;
+                1) append return_rpath_dirs_list "$1" ;;
+                2) append return_system_rpath_dirs_list "$1" ;;
+            esac
             wl_expect_rpath=no
         else
             case "$1" in
@@ -432,21 +430,25 @@ parse_Wl() {
                     arg="${1#-rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 --rpath=*)
                     arg="${1#--rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -473,12 +475,20 @@ categorize_arguments() {

     return_other_args_list=""
     return_isystem_was_used=""
+
+    return_isystem_spack_store_include_dirs_list=""
     return_isystem_system_include_dirs_list=""
     return_isystem_include_dirs_list=""
+
+    return_spack_store_include_dirs_list=""
     return_system_include_dirs_list=""
     return_include_dirs_list=""
+
+    return_spack_store_lib_dirs_list=""
     return_system_lib_dirs_list=""
     return_lib_dirs_list=""
+
+    return_spack_store_rpath_dirs_list=""
     return_system_rpath_dirs_list=""
     return_rpath_dirs_list=""

@@ -526,7 +536,7 @@ categorize_arguments() {
                 continue
             fi

             replaced="$after$stripped"

             # it matched, remove it
             shift
@@ -546,29 +556,32 @@ categorize_arguments() {
                 arg="${1#-isystem}"
                 return_isystem_was_used=true
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_isystem_system_include_dirs_list "$arg"
-                else
-                    append return_isystem_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_isystem_include_dirs_list "$arg" ;;
+                    2) append return_isystem_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -I*)
                 arg="${1#-I}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_include_dirs_list "$arg"
-                else
-                    append return_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_include_dirs_list "$arg" ;;
+                    2) append return_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -L*)
                 arg="${1#-L}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_lib_dirs_list "$arg"
-                else
-                    append return_lib_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_lib_dirs_list "$arg" ;;
+                    1) append return_lib_dirs_list "$arg" ;;
+                    2) append return_system_lib_dirs_list "$arg" ;;
+                esac
                 ;;
             -l*)
                 # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -601,29 +614,32 @@ categorize_arguments() {
                     break
                 elif [ "$xlinker_expect_rpath" = yes ]; then
                     # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                    if system_dir "$1"; then
-                        append return_system_rpath_dirs_list "$1"
-                    else
-                        append return_rpath_dirs_list "$1"
-                    fi
+                    path_order "$1"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$1" ;;
+                        1) append return_rpath_dirs_list "$1" ;;
+                        2) append return_system_rpath_dirs_list "$1" ;;
+                    esac
                     xlinker_expect_rpath=no
                 else
                     case "$1" in
                         -rpath=*)
                             arg="${1#-rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         --rpath=*)
                             arg="${1#--rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         -rpath|--rpath)
                             xlinker_expect_rpath=yes
@@ -661,16 +677,25 @@ categorize_arguments() {
 }

 categorize_arguments "$@"
-include_dirs_list="$return_include_dirs_list"
-lib_dirs_list="$return_lib_dirs_list"
-rpath_dirs_list="$return_rpath_dirs_list"
-system_include_dirs_list="$return_system_include_dirs_list"
-system_lib_dirs_list="$return_system_lib_dirs_list"
-system_rpath_dirs_list="$return_system_rpath_dirs_list"
-isystem_was_used="$return_isystem_was_used"
-isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-isystem_include_dirs_list="$return_isystem_include_dirs_list"
-other_args_list="$return_other_args_list"
+
+spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+system_include_dirs_list="$return_system_include_dirs_list"
+include_dirs_list="$return_include_dirs_list"
+
+spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+system_lib_dirs_list="$return_system_lib_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+
+spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+system_rpath_dirs_list="$return_system_rpath_dirs_list"
+rpath_dirs_list="$return_rpath_dirs_list"
+
+isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+isystem_was_used="$return_isystem_was_used"
+other_args_list="$return_other_args_list"

 #
 # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -730,7 +755,7 @@ esac

 # Linker flags
 case "$mode" in
-    ld|ccld)
+    ccld)
         extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac
@@ -738,16 +763,25 @@ esac
 IFS="$lsep"
     categorize_arguments $spack_flags_list
 unset IFS
-spack_flags_include_dirs_list="$return_include_dirs_list"
-spack_flags_lib_dirs_list="$return_lib_dirs_list"
-spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
-spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
-spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
-spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
-spack_flags_isystem_was_used="$return_isystem_was_used"
-spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
-spack_flags_other_args_list="$return_other_args_list"
+
+spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_include_dirs_list="$return_include_dirs_list"
+
+spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+
+spack_flags_isystem_was_used="$return_isystem_was_used"
+spack_flags_other_args_list="$return_other_args_list"

 # On macOS insert headerpad_max_install_names linker flag
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
         # Append RPATH directories. Note that in the case of the
         # top-level package these directories may not exist yet. For dependencies
         # it is assumed that paths have already been confirmed.
+        extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
         extend rpath_dirs_list SPACK_RPATH_DIRS
     fi
 fi

 if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
+    extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
     extend lib_dirs_list SPACK_LINK_DIRS
 fi

@@ -798,38 +834,50 @@ case "$mode" in
     ;;
 esac

+case "$mode" in
+    cpp|cc|as|ccld)
+        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
+            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
+        else
+            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend include_dirs_list SPACK_INCLUDE_DIRS
+        fi
+        ;;
+esac
+
 #
 # Finally, reassemble the command line.
 #
 args_list="$flags_list"

-# Insert include directories just prior to any system include directories
+# Include search paths partitioned by (in store, non-system, system)
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_flags_include_dirs_list "-I"
-extend args_list include_dirs_list "-I"
+extend args_list spack_flags_spack_store_include_dirs_list -I
+extend args_list spack_store_include_dirs_list -I
+
+extend args_list spack_flags_include_dirs_list -I
+extend args_list include_dirs_list -I
+
+extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
+
 extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"

-case "$mode" in
-    cpp|cc|as|ccld)
-        if [ "$spack_flags_isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        elif [ "$isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        else
-            extend args_list SPACK_INCLUDE_DIRS "-I"
-        fi
-        ;;
-esac
-
 extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I

 extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

-# Library search paths
+# Library search paths partitioned by (in store, non-system, system)
+extend args_list spack_flags_spack_store_lib_dirs_list "-L"
+extend args_list spack_store_lib_dirs_list "-L"
+
 extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"

 extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"

@@ -839,8 +887,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+        extend args_list spack_store_rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;
@@ -848,8 +900,12 @@ case "$mode" in
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
@@ -913,4 +969,3 @@ fi
 # Execute the full command, preserving spaces with IFS set
 # to the alarm bell separator.
 IFS="$lsep"; exec $full_command_list
-
lib/spack/external/__init__.py (vendored)

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
+* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

 astunparse
 ----------------
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                    Tag.attrib, merge_attrib]:
         if hasattr(self, a):
             if memo is not None:
-                setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                setattr(t, a, copy.deepcopy(getattr(self, a), memo))
             else:
                 setattr(t, a, getattr(self, a))
     # fmt: on
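This one-character move fixes a misplaced parenthesis: the old code passed ``memo`` as the *default value* of ``getattr``, so ``copy.deepcopy`` never received its memo dictionary. A minimal sketch of the difference:

.. code-block:: python

   import copy

   class Node:
       pass

   obj, memo = Node(), {}
   obj.attrib = {"key": "value"}

   # Buggy: memo is only getattr's fallback default; deepcopy gets no memo.
   copied = copy.deepcopy(getattr(obj, "attrib", memo))

   # Fixed: deepcopy receives the memo dict, so shared objects are copied
   # once and reference cycles are handled correctly.
   copied = copy.deepcopy(getattr(obj, "attrib"), memo)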
|
|||||||
3
lib/spack/external/archspec/__init__.py
vendored
3
lib/spack/external/archspec/__init__.py
vendored
@@ -1,2 +1,3 @@
|
|||||||
"""Init file to avoid namespace packages"""
|
"""Init file to avoid namespace packages"""
|
||||||
__version__ = "0.2.2"
|
|
||||||
|
__version__ = "0.2.4"
|
||||||
|
lib/spack/external/archspec/__main__.py (vendored)

@@ -3,6 +3,7 @@
 """

 import sys
+
 from .cli import main

 sys.exit(main())
lib/spack/external/archspec/cli.py (vendored)

@@ -46,7 +46,11 @@ def _make_parser() -> argparse.ArgumentParser:

 def cpu() -> int:
     """Run the `archspec cpu` subcommand."""
-    print(archspec.cpu.host())
+    try:
+        print(archspec.cpu.host())
+    except FileNotFoundError as exc:
+        print(exc)
+        return 1
     return 0
lib/spack/external/archspec/cpu/__init__.py (vendored)

@@ -5,16 +5,23 @@
 """The "cpu" package permits to query and compare different
 CPU microarchitectures.
 """
-from .microarchitecture import Microarchitecture, UnsupportedMicroarchitecture
-from .microarchitecture import TARGETS, generic_microarchitecture
-from .microarchitecture import version_components
-from .detect import host
+from .detect import brand_string, host
+from .microarchitecture import (
+    TARGETS,
+    InvalidCompilerVersion,
+    Microarchitecture,
+    UnsupportedMicroarchitecture,
+    generic_microarchitecture,
+    version_components,
+)

 __all__ = [
+    "brand_string",
+    "host",
+    "TARGETS",
+    "InvalidCompilerVersion",
     "Microarchitecture",
     "UnsupportedMicroarchitecture",
-    "TARGETS",
     "generic_microarchitecture",
-    "host",
     "version_components",
 ]
|||||||
410
lib/spack/external/archspec/cpu/detect.py
vendored
410
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -4,15 +4,17 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
"""Detection of CPU microarchitectures"""
|
"""Detection of CPU microarchitectures"""
|
||||||
import collections
|
import collections
|
||||||
import functools
|
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
import re
|
import re
|
||||||
|
import struct
|
||||||
import subprocess
|
import subprocess
|
||||||
import warnings
|
import warnings
|
||||||
|
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||||
|
|
||||||
from .microarchitecture import generic_microarchitecture, TARGETS
|
from ..vendor.cpuid.cpuid import CPUID
|
||||||
from .schema import TARGETS_JSON
|
from .microarchitecture import TARGETS, Microarchitecture, generic_microarchitecture
|
||||||
|
from .schema import CPUID_JSON, TARGETS_JSON
|
||||||
|
|
||||||
#: Mapping from operating systems to chain of commands
|
#: Mapping from operating systems to chain of commands
|
||||||
#: to obtain a dictionary of raw info on the current cpu
|
#: to obtain a dictionary of raw info on the current cpu
|
||||||
@@ -22,43 +24,46 @@
|
|||||||
#: functions checking the compatibility of the host with a given target
|
#: functions checking the compatibility of the host with a given target
|
||||||
COMPATIBILITY_CHECKS = {}
|
COMPATIBILITY_CHECKS = {}
|
||||||
|
|
||||||
|
# Constants for commonly used architectures
|
||||||
|
X86_64 = "x86_64"
|
||||||
|
AARCH64 = "aarch64"
|
||||||
|
PPC64LE = "ppc64le"
|
||||||
|
PPC64 = "ppc64"
|
||||||
|
RISCV64 = "riscv64"
|
||||||
|
|
||||||
def info_dict(operating_system):
|
|
||||||
"""Decorator to mark functions that are meant to return raw info on
|
def detection(operating_system: str):
|
||||||
the current cpu.
|
"""Decorator to mark functions that are meant to return partial information on the current cpu.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
operating_system (str or tuple): operating system for which the marked
|
operating_system: operating system where this function can be used.
|
||||||
function is a viable factory of raw info dictionaries.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def decorator(factory):
|
def decorator(factory):
|
||||||
INFO_FACTORY[operating_system].append(factory)
|
INFO_FACTORY[operating_system].append(factory)
|
||||||
|
return factory
|
||||||
@functools.wraps(factory)
|
|
||||||
def _impl():
|
|
||||||
info = factory()
|
|
||||||
|
|
||||||
# Check that info contains a few mandatory fields
|
|
||||||
msg = 'field "{0}" is missing from raw info dictionary'
|
|
||||||
assert "vendor_id" in info, msg.format("vendor_id")
|
|
||||||
assert "flags" in info, msg.format("flags")
|
|
||||||
assert "model" in info, msg.format("model")
|
|
||||||
assert "model_name" in info, msg.format("model_name")
|
|
||||||
|
|
||||||
return info
|
|
||||||
|
|
||||||
return _impl
|
|
||||||
|
|
||||||
return decorator
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
@info_dict(operating_system="Linux")
|
def partial_uarch(
|
||||||
def proc_cpuinfo():
|
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
|
||||||
"""Returns a raw info dictionary by parsing the first entry of
|
) -> Microarchitecture:
|
||||||
``/proc/cpuinfo``
|
"""Construct a partial microarchitecture, from information gathered during system scan."""
|
||||||
"""
|
return Microarchitecture(
|
||||||
info = {}
|
name=name,
|
||||||
|
parents=[],
|
||||||
|
vendor=vendor,
|
||||||
|
features=features or set(),
|
||||||
|
compilers={},
|
||||||
|
generation=generation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@detection(operating_system="Linux")
|
||||||
|
def proc_cpuinfo() -> Microarchitecture:
|
||||||
|
"""Returns a partial Microarchitecture, obtained from scanning ``/proc/cpuinfo``"""
|
||||||
|
data = {}
|
||||||
with open("/proc/cpuinfo") as file: # pylint: disable=unspecified-encoding
|
with open("/proc/cpuinfo") as file: # pylint: disable=unspecified-encoding
|
||||||
for line in file:
|
for line in file:
|
||||||
key, separator, value = line.partition(":")
|
key, separator, value = line.partition(":")
|
||||||
@@ -70,11 +75,121 @@ def proc_cpuinfo():
|
|||||||
#
|
#
|
||||||
# we are on a blank line separating two cpus. Exit early as
|
# we are on a blank line separating two cpus. Exit early as
|
||||||
# we want to read just the first entry in /proc/cpuinfo
|
# we want to read just the first entry in /proc/cpuinfo
|
||||||
if separator != ":" and info:
|
if separator != ":" and data:
|
||||||
break
|
break
|
||||||
|
|
||||||
info[key.strip()] = value.strip()
|
data[key.strip()] = value.strip()
|
||||||
return info
|
|
||||||
|
architecture = _machine()
|
||||||
|
if architecture == X86_64:
|
||||||
|
return partial_uarch(
|
||||||
|
vendor=data.get("vendor_id", "generic"), features=_feature_set(data, key="flags")
|
||||||
|
)
|
||||||
|
|
||||||
|
if architecture == AARCH64:
|
||||||
|
return partial_uarch(
|
||||||
|
vendor=_canonicalize_aarch64_vendor(data),
|
||||||
|
features=_feature_set(data, key="Features"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if architecture in (PPC64LE, PPC64):
|
||||||
|
generation_match = re.search(r"POWER(\d+)", data.get("cpu", ""))
|
||||||
|
try:
|
||||||
|
generation = int(generation_match.group(1))
|
||||||
|
except AttributeError:
|
||||||
|
# There might be no match under emulated environments. For instance
|
||||||
|
# emulating a ppc64le with QEMU and Docker still reports the host
|
||||||
|
# /proc/cpuinfo and not a Power
|
||||||
|
generation = 0
|
||||||
|
return partial_uarch(generation=generation)
|
||||||
|
|
||||||
|
if architecture == RISCV64:
|
||||||
|
if data.get("uarch") == "sifive,u74-mc":
|
||||||
|
data["uarch"] = "u74mc"
|
||||||
|
return partial_uarch(name=data.get("uarch", RISCV64))
|
||||||
|
|
||||||
|
return generic_microarchitecture(architecture)
|
||||||
|
|
||||||
|
|
||||||
|
class CpuidInfoCollector:
|
||||||
|
"""Collects the information we need on the host CPU from cpuid"""
|
||||||
|
|
||||||
|
# pylint: disable=too-few-public-methods
|
||||||
|
def __init__(self):
|
||||||
|
self.cpuid = CPUID()
|
||||||
|
|
||||||
|
registers = self.cpuid.registers_for(**CPUID_JSON["vendor"]["input"])
|
||||||
|
self.highest_basic_support = registers.eax
|
||||||
|
self.vendor = struct.pack("III", registers.ebx, registers.edx, registers.ecx).decode(
|
||||||
|
"utf-8"
|
||||||
|
)
|
||||||
|
|
||||||
|
registers = self.cpuid.registers_for(**CPUID_JSON["highest_extension_support"]["input"])
|
||||||
|
self.highest_extension_support = registers.eax
|
||||||
|
|
||||||
|
self.features = self._features()
|
||||||
|
|
||||||
|
def _features(self):
|
||||||
|
result = set()
|
||||||
|
|
||||||
|
def check_features(data):
|
||||||
|
registers = self.cpuid.registers_for(**data["input"])
|
||||||
|
for feature_check in data["bits"]:
|
||||||
|
current = getattr(registers, feature_check["register"])
|
||||||
|
if self._is_bit_set(current, feature_check["bit"]):
|
||||||
|
result.add(feature_check["name"])
|
||||||
|
|
||||||
|
for call_data in CPUID_JSON["flags"]:
|
||||||
|
if call_data["input"]["eax"] > self.highest_basic_support:
|
||||||
|
continue
|
||||||
|
check_features(call_data)
|
||||||
|
|
||||||
|
for call_data in CPUID_JSON["extension-flags"]:
|
||||||
|
if call_data["input"]["eax"] > self.highest_extension_support:
|
||||||
|
continue
|
||||||
|
check_features(call_data)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _is_bit_set(self, register: int, bit: int) -> bool:
|
||||||
|
mask = 1 << bit
|
||||||
|
return register & mask > 0
|
||||||
|
|
||||||
|
def brand_string(self) -> Optional[str]:
|
||||||
|
"""Returns the brand string, if available."""
|
||||||
|
if self.highest_extension_support < 0x80000004:
|
||||||
|
return None
|
||||||
|
|
||||||
|
r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
|
||||||
|
r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
|
||||||
|
r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
|
||||||
|
result = struct.pack(
|
||||||
|
"IIIIIIIIIIII",
|
||||||
|
r1.eax,
|
||||||
|
r1.ebx,
|
||||||
|
r1.ecx,
|
||||||
|
r1.edx,
|
||||||
|
r2.eax,
|
||||||
|
r2.ebx,
|
||||||
|
r2.ecx,
|
||||||
|
r2.edx,
|
||||||
|
r3.eax,
|
||||||
|
r3.ebx,
|
||||||
|
r3.ecx,
|
||||||
|
r3.edx,
|
||||||
|
).decode("utf-8")
|
||||||
|
return result.strip("\x00")
|
||||||
|
|
||||||
|
|
||||||
|
@detection(operating_system="Windows")
|
||||||
|
def cpuid_info():
|
||||||
|
"""Returns a partial Microarchitecture, obtained from running the cpuid instruction"""
|
||||||
|
architecture = _machine()
|
||||||
|
if architecture == X86_64:
|
||||||
|
data = CpuidInfoCollector()
|
||||||
|
return partial_uarch(vendor=data.vendor, features=data.features)
|
||||||
|
|
||||||
|
return generic_microarchitecture(architecture)
|
||||||
|
|
||||||
|
|
||||||
def _check_output(args, env):
|
def _check_output(args, env):
|
||||||
@@ -83,14 +198,25 @@ def _check_output(args, env):
|
|||||||
return str(output.decode("utf-8"))
|
return str(output.decode("utf-8"))
|
||||||
|
|
||||||
|
|
||||||
|
WINDOWS_MAPPING = {
|
||||||
|
"AMD64": X86_64,
|
||||||
|
"ARM64": AARCH64,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def _machine():
-    """ "Return the machine architecture we are on"""
+    """Return the machine architecture we are on"""
    operating_system = platform.system()

-    # If we are not on Darwin, trust what Python tells us
-    if operating_system != "Darwin":
+    # If we are not on Darwin or Windows, trust what Python tells us
+    if operating_system not in ("Darwin", "Windows"):
        return platform.machine()

+    # Normalize windows specific names
+    if operating_system == "Windows":
+        platform_machine = platform.machine()
+        return WINDOWS_MAPPING.get(platform_machine, platform_machine)
+
    # On Darwin it might happen that we are on M1, but using an interpreter
    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
    # need to fix that.
@@ -103,54 +229,47 @@ def _machine():
    if "Apple" in output:
        # Note that a native Python interpreter on Apple M1 would return
        # "arm64" instead of "aarch64". Here we normalize to the latter.
-        return "aarch64"
+        return AARCH64

-    return "x86_64"
+    return X86_64
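A quick sketch of the Windows name normalization added above, with made-up inputs; unknown machine names fall through unchanged:

```python
# Sketch only: mirrors WINDOWS_MAPPING.get(platform_machine, platform_machine).
WINDOWS_MAPPING = {"AMD64": "x86_64", "ARM64": "aarch64"}
for machine in ("AMD64", "ARM64", "mips64"):
    print(WINDOWS_MAPPING.get(machine, machine))
# x86_64, aarch64, mips64
```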
@info_dict(operating_system="Darwin")
|
@detection(operating_system="Darwin")
|
||||||
def sysctl_info_dict():
|
def sysctl_info() -> Microarchitecture:
|
||||||
"""Returns a raw info dictionary parsing the output of sysctl."""
|
"""Returns a raw info dictionary parsing the output of sysctl."""
|
||||||
child_environment = _ensure_bin_usrbin_in_path()
|
child_environment = _ensure_bin_usrbin_in_path()
|
||||||
|
|
||||||
def sysctl(*args):
|
def sysctl(*args: str) -> str:
|
||||||
return _check_output(["sysctl"] + list(args), env=child_environment).strip()
|
return _check_output(["sysctl"] + list(args), env=child_environment).strip()
|
||||||
|
|
||||||
if _machine() == "x86_64":
|
if _machine() == X86_64:
|
||||||
flags = (
|
features = (
|
||||||
sysctl("-n", "machdep.cpu.features").lower()
|
f'{sysctl("-n", "machdep.cpu.features").lower()} '
|
||||||
+ " "
|
f'{sysctl("-n", "machdep.cpu.leaf7_features").lower()}'
|
||||||
+ sysctl("-n", "machdep.cpu.leaf7_features").lower()
|
|
||||||
)
|
)
|
||||||
info = {
|
features = set(features.split())
|
||||||
"vendor_id": sysctl("-n", "machdep.cpu.vendor"),
|
|
||||||
"flags": flags,
|
|
||||||
"model": sysctl("-n", "machdep.cpu.model"),
|
|
||||||
"model name": sysctl("-n", "machdep.cpu.brand_string"),
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
model = "unknown"
|
|
||||||
model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
|
|
||||||
if "m2" in model_str:
|
|
||||||
model = "m2"
|
|
||||||
elif "m1" in model_str:
|
|
||||||
model = "m1"
|
|
||||||
elif "apple" in model_str:
|
|
||||||
model = "m1"
|
|
||||||
|
|
||||||
info = {
|
# Flags detected on Darwin turned to their linux counterpart
|
||||||
"vendor_id": "Apple",
|
for darwin_flag, linux_flag in TARGETS_JSON["conversions"]["darwin_flags"].items():
|
||||||
"flags": [],
|
if darwin_flag in features:
|
||||||
"model": model,
|
features.update(linux_flag.split())
|
||||||
"CPU implementer": "Apple",
|
|
||||||
"model name": sysctl("-n", "machdep.cpu.brand_string"),
|
return partial_uarch(vendor=sysctl("-n", "machdep.cpu.vendor"), features=features)
|
||||||
}
|
|
||||||
return info
|
model = "unknown"
|
||||||
|
model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
|
||||||
|
if "m2" in model_str:
|
||||||
|
model = "m2"
|
||||||
|
elif "m1" in model_str:
|
||||||
|
model = "m1"
|
||||||
|
elif "apple" in model_str:
|
||||||
|
model = "m1"
|
||||||
|
|
||||||
|
return partial_uarch(name=model, vendor="Apple")
|
||||||
|
|
||||||
|
|
||||||
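The x86_64 branch above translates Darwin flag names to their Linux spellings via the darwin_flags table in microarchitectures.json. A sketch of that loop with a made-up two-entry mapping:

```python
# Hypothetical excerpt of TARGETS_JSON["conversions"]["darwin_flags"].
darwin_flags = {"avx1.0": "avx", "sse4.1": "sse4_1"}
features = {"sse4.1", "avx1.0", "ssse3"}
for darwin_flag, linux_flag in darwin_flags.items():
    if darwin_flag in features:
        features.update(linux_flag.split())
print(sorted(features))  # ['avx', 'avx1.0', 'sse4.1', 'sse4_1', 'ssse3']
```

Note that the Darwin spelling is kept alongside the Linux one, so both names can be matched later.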
def _ensure_bin_usrbin_in_path():
-    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
-    # usually found there
+    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
@@ -160,22 +279,10 @@ def _ensure_bin_usrbin_in_path():
    return child_environment
-def adjust_raw_flags(info):
-    """Adjust the flags detected on the system to homogenize
-    slightly different representations.
-    """
-    # Flags detected on Darwin turned to their linux counterpart
-    flags = info.get("flags", [])
-    d2l = TARGETS_JSON["conversions"]["darwin_flags"]
-    for darwin_flag, linux_flag in d2l.items():
-        if darwin_flag in flags:
-            info["flags"] += " " + linux_flag
-
-
-def adjust_raw_vendor(info):
-    """Adjust the vendor field to make it human readable"""
-    if "CPU implementer" not in info:
-        return
+def _canonicalize_aarch64_vendor(data: Dict[str, str]) -> str:
+    """Adjust the vendor field to make it human-readable"""
+    if "CPU implementer" not in data:
+        return "generic"

    # Mapping numeric codes to vendor (ARM). This list is a merge from
    # different sources:
@@ -185,43 +292,37 @@ def adjust_raw_vendor(info):
    # https://github.com/gcc-mirror/gcc/blob/master/gcc/config/aarch64/aarch64-cores.def
    # https://patchwork.kernel.org/patch/10524949/
    arm_vendors = TARGETS_JSON["conversions"]["arm_vendors"]
-    arm_code = info["CPU implementer"]
-    if arm_code in arm_vendors:
-        info["CPU implementer"] = arm_vendors[arm_code]
+    arm_code = data["CPU implementer"]
+    return arm_vendors.get(arm_code, arm_code)
-def raw_info_dictionary():
-    """Returns a dictionary with information on the cpu of the current host.
+def _feature_set(data: Dict[str, str], key: str) -> Set[str]:
+    return set(data.get(key, "").split())

-    This function calls all the viable factories one after the other until
-    there's one that is able to produce the requested information.
+
+def detected_info() -> Microarchitecture:
+    """Returns a partial Microarchitecture with information on the CPU of the current host.
+
+    This function calls all the viable factories one after the other until there's one that is
+    able to produce the requested information. Falls back to a generic microarchitecture if none
+    of the calls succeed.
    """
    # pylint: disable=broad-except
-    info = {}
    for factory in INFO_FACTORY[platform.system()]:
        try:
-            info = factory()
+            return factory()
        except Exception as exc:
            warnings.warn(str(exc))

-        if info:
-            adjust_raw_flags(info)
-            adjust_raw_vendor(info)
-            break
-
-    return info
+    return generic_microarchitecture(_machine())
-def compatible_microarchitectures(info):
-    """Returns an unordered list of known micro-architectures that are
-    compatible with the info dictionary passed as argument.
-
-    Args:
-        info (dict): dictionary containing information on the host cpu
-    """
+def compatible_microarchitectures(info: Microarchitecture) -> List[Microarchitecture]:
+    """Returns an unordered list of known micro-architectures that are compatible with the
+    partial Microarchitecture passed as input.
+    """
    architecture_family = _machine()
-    # If a tester is not registered, be conservative and assume no known
-    # target is compatible with the host
+    # If a tester is not registered, assume no known target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
    return [x for x in TARGETS.values() if tester(info, x)] or [
        generic_microarchitecture(architecture_family)
@@ -230,8 +331,8 @@ def compatible_microarchitectures(info):

def host():
    """Detects the host micro-architecture and returns it."""
-    # Retrieve a dictionary with raw information on the host's cpu
-    info = raw_info_dictionary()
+    # Retrieve information on the host's cpu
+    info = detected_info()

    # Get a list of possible candidates for this micro-architecture
    candidates = compatible_microarchitectures(info)
@@ -258,16 +359,15 @@ def sorting_fn(item):
    return max(candidates, key=sorting_fn)


-def compatibility_check(architecture_family):
+def compatibility_check(architecture_family: Union[str, Tuple[str, ...]]):
    """Decorator to register a function as a proper compatibility check.

-    A compatibility check function takes the raw info dictionary as a first
-    argument and an arbitrary target as the second argument. It returns True
-    if the target is compatible with the info dictionary, False otherwise.
+    A compatibility check function takes a partial Microarchitecture object as a first argument,
+    and an arbitrary target Microarchitecture as the second argument. It returns True if the
+    target is compatible with the first argument, False otherwise.

    Args:
-        architecture_family (str or tuple): architecture family for which
-            this test can be used, e.g. x86_64 or ppc64le etc.
+        architecture_family: architecture family for which this test can be used
    """
    # Turn the argument into something iterable
    if isinstance(architecture_family, str):
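Registering a new check is just a matter of decorating a two-argument predicate. A hypothetical registration for an imaginary architecture family, sketched against the decorator shown above:

```python
# Sketch only: "foo64" is a made-up architecture family.
@compatibility_check(architecture_family="foo64")
def compatibility_check_for_foo64(info, target):
    # Accept a target when it is generic or shares the host's vendor.
    return target.vendor in (info.vendor, "generic")
```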
@@ -280,86 +380,70 @@ def decorator(func):
    return decorator


-@compatibility_check(architecture_family=("ppc64le", "ppc64"))
+@compatibility_check(architecture_family=(PPC64LE, PPC64))
def compatibility_check_for_power(info, target):
    """Compatibility check for PPC64 and PPC64LE architectures."""
-    basename = platform.machine()
-    generation_match = re.search(r"POWER(\d+)", info.get("cpu", ""))
-    try:
-        generation = int(generation_match.group(1))
-    except AttributeError:
-        # There might be no match under emulated environments. For instance
-        # emulating a ppc64le with QEMU and Docker still reports the host
-        # /proc/cpuinfo and not a Power
-        generation = 0
-
    # We can use a target if it descends from our machine type and our
    # generation (9 for POWER9, etc) is at least its generation.
-    arch_root = TARGETS[basename]
+    arch_root = TARGETS[_machine()]
    return (
        target == arch_root or arch_root in target.ancestors
-    ) and target.generation <= generation
+    ) and target.generation <= info.generation
@compatibility_check(architecture_family="x86_64")
|
@compatibility_check(architecture_family=X86_64)
|
||||||
def compatibility_check_for_x86_64(info, target):
|
def compatibility_check_for_x86_64(info, target):
|
||||||
"""Compatibility check for x86_64 architectures."""
|
"""Compatibility check for x86_64 architectures."""
|
||||||
basename = "x86_64"
|
|
||||||
vendor = info.get("vendor_id", "generic")
|
|
||||||
features = set(info.get("flags", "").split())
|
|
||||||
|
|
||||||
# We can use a target if it descends from our machine type, is from our
|
# We can use a target if it descends from our machine type, is from our
|
||||||
# vendor, and we have all of its features
|
# vendor, and we have all of its features
|
||||||
arch_root = TARGETS[basename]
|
arch_root = TARGETS[X86_64]
|
||||||
return (
|
return (
|
||||||
(target == arch_root or arch_root in target.ancestors)
|
(target == arch_root or arch_root in target.ancestors)
|
||||||
and target.vendor in (vendor, "generic")
|
and target.vendor in (info.vendor, "generic")
|
||||||
and target.features.issubset(features)
|
and target.features.issubset(info.features)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@compatibility_check(architecture_family="aarch64")
|
@compatibility_check(architecture_family=AARCH64)
|
||||||
def compatibility_check_for_aarch64(info, target):
|
def compatibility_check_for_aarch64(info, target):
|
||||||
"""Compatibility check for AARCH64 architectures."""
|
"""Compatibility check for AARCH64 architectures."""
|
||||||
basename = "aarch64"
|
# At the moment, it's not clear how to detect compatibility with
|
||||||
features = set(info.get("Features", "").split())
|
|
||||||
vendor = info.get("CPU implementer", "generic")
|
|
||||||
|
|
||||||
# At the moment it's not clear how to detect compatibility with
|
|
||||||
# a specific version of the architecture
|
# a specific version of the architecture
|
||||||
if target.vendor == "generic" and target.name != "aarch64":
|
if target.vendor == "generic" and target.name != AARCH64:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
arch_root = TARGETS[basename]
|
arch_root = TARGETS[AARCH64]
|
||||||
arch_root_and_vendor = arch_root == target.family and target.vendor in (
|
arch_root_and_vendor = arch_root == target.family and target.vendor in (
|
||||||
vendor,
|
info.vendor,
|
||||||
"generic",
|
"generic",
|
||||||
)
|
)
|
||||||
|
|
||||||
# On macOS it seems impossible to get all the CPU features
|
# On macOS it seems impossible to get all the CPU features
|
||||||
# with syctl info, but for ARM we can get the exact model
|
# with syctl info, but for ARM we can get the exact model
|
||||||
if platform.system() == "Darwin":
|
if platform.system() == "Darwin":
|
||||||
model_key = info.get("model", basename)
|
model = TARGETS[info.name]
|
||||||
model = TARGETS[model_key]
|
|
||||||
return arch_root_and_vendor and (target == model or target in model.ancestors)
|
return arch_root_and_vendor and (target == model or target in model.ancestors)
|
||||||
|
|
||||||
return arch_root_and_vendor and target.features.issubset(features)
|
return arch_root_and_vendor and target.features.issubset(info.features)
|
||||||
|
|
||||||
|
|
||||||
@compatibility_check(architecture_family="riscv64")
|
@compatibility_check(architecture_family=RISCV64)
|
||||||
def compatibility_check_for_riscv64(info, target):
|
def compatibility_check_for_riscv64(info, target):
|
||||||
"""Compatibility check for riscv64 architectures."""
|
"""Compatibility check for riscv64 architectures."""
|
||||||
basename = "riscv64"
|
arch_root = TARGETS[RISCV64]
|
||||||
uarch = info.get("uarch")
|
|
||||||
|
|
||||||
# sifive unmatched board
|
|
||||||
if uarch == "sifive,u74-mc":
|
|
||||||
uarch = "u74mc"
|
|
||||||
# catch-all for unknown uarchs
|
|
||||||
else:
|
|
||||||
uarch = "riscv64"
|
|
||||||
|
|
||||||
arch_root = TARGETS[basename]
|
|
||||||
return (target == arch_root or arch_root in target.ancestors) and (
|
return (target == arch_root or arch_root in target.ancestors) and (
|
||||||
target == uarch or target.vendor == "generic"
|
target.name == info.name or target.vendor == "generic"
|
||||||
)
|
)
|
||||||
|
|
||||||
def brand_string() -> Optional[str]:
    """Returns the brand string of the host, if detected, or None."""
    if platform.system() == "Darwin":
        return _check_output(
            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
        ).strip()

    if host().family == X86_64:
        return CpuidInfoCollector().brand_string()

    return None
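Assuming the new helper is re-exported at package level like the other detection entry points (the re-export is not shown in this diff), usage would look like:

```python
# Sketch; the package-level re-export is an assumption.
import archspec.cpu

print(archspec.cpu.brand_string())
# e.g. "Intel(R) Xeon(R) CPU W3550 @ 3.07GHz" on x86_64, None where undetectable
```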
@@ -13,6 +13,7 @@
import archspec
import archspec.cpu.alias
import archspec.cpu.schema

from .alias import FEATURE_ALIASES
from .schema import LazyDictionary
@@ -47,7 +48,7 @@ class Microarchitecture:
            which has "broadwell" as a parent, supports running binaries
            optimized for "broadwell".
        vendor (str): vendor of the micro-architecture
-        features (list of str): supported CPU flags. Note that the semantic
+        features (set of str): supported CPU flags. Note that the semantic
            of the flags in this field might vary among architectures, if
            at all present. For instance x86_64 processors will list all
            the flags supported by a given CPU while Arm processors will
@@ -180,29 +181,35 @@ def generic(self):
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))

-    def to_dict(self, return_list_of_items=False):
-        """Returns a dictionary representation of this object.
-
-        Args:
-            return_list_of_items (bool): if True returns an ordered list of
-                items instead of the dictionary
-        """
-        list_of_items = [
-            ("name", str(self.name)),
-            ("vendor", str(self.vendor)),
-            ("features", sorted(str(x) for x in self.features)),
-            ("generation", self.generation),
-            ("parents", [str(x) for x in self.parents]),
-        ]
-        if return_list_of_items:
-            return list_of_items
-
-        return dict(list_of_items)
+    def to_dict(self):
+        """Returns a dictionary representation of this object."""
+        return {
+            "name": str(self.name),
+            "vendor": str(self.vendor),
+            "features": sorted(str(x) for x in self.features),
+            "generation": self.generation,
+            "parents": [str(x) for x in self.parents],
+            "compilers": self.compilers,
+        }
+
+    @staticmethod
+    def from_dict(data) -> "Microarchitecture":
+        """Construct a microarchitecture from a dictionary representation."""
+        return Microarchitecture(
+            name=data["name"],
+            parents=[TARGETS[x] for x in data["parents"]],
+            vendor=data["vendor"],
+            features=set(data["features"]),
+            compilers=data.get("compilers", {}),
+            generation=data.get("generation", 0),
+        )
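With compilers now included in to_dict and the new from_dict constructor, serialization should round-trip. A minimal sketch (the target name is just a sample, and Microarchitecture is assumed to be importable from archspec.cpu as in released versions):

```python
# Round-trip sketch for the to_dict/from_dict pair above.
import archspec.cpu

uarch = archspec.cpu.TARGETS["haswell"]
clone = archspec.cpu.Microarchitecture.from_dict(uarch.to_dict())
assert clone == uarch  # parents are resolved back through TARGETS
```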
    def optimization_flags(self, compiler, version):
        """Returns a string containing the optimization flags that needs
        to be used to produce code optimized for this micro-architecture.

+        The version is expected to be a string of dot separated digits.
+
        If there is no information on the compiler passed as argument the
        function returns an empty string. If it is known that the compiler
        version we want to use does not support this architecture the function
@@ -211,6 +218,11 @@ def optimization_flags(self, compiler, version):
        Args:
            compiler (str): name of the compiler to be used
            version (str): version of the compiler to be used
+
+        Raises:
+            UnsupportedMicroarchitecture: if the requested compiler does not support
+                this micro-architecture.
+            ValueError: if the version doesn't match the expected format
        """
        # If we don't have information on compiler at all return an empty string
        if compiler not in self.family.compilers:
@@ -227,6 +239,14 @@ def optimization_flags(self, compiler, version):
            msg = msg.format(compiler, best_target, best_target.family)
            raise UnsupportedMicroarchitecture(msg)

+        # Check that the version matches the expected format
+        if not re.match(r"^(?:\d+\.)*\d+$", version):
+            msg = (
+                "invalid format for the compiler version argument. "
+                "Only dot separated digits are allowed."
+            )
+            raise InvalidCompilerVersion(msg)
+
        # If we have information on this compiler we need to check the
        # version being used
        compiler_info = self.compilers[compiler]
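The new guard rejects anything that is not dot-separated digits before the version comparison logic runs; since InvalidCompilerVersion subclasses ValueError, callers can catch either. A sketch of both outcomes (target and compiler names are just examples):

```python
import archspec.cpu

uarch = archspec.cpu.TARGETS["x86_64"]
print(uarch.optimization_flags("gcc", "13.1"))  # accepted: dot separated digits
try:
    uarch.optimization_flags("gcc", "13.x")     # rejected by the format check
except ValueError as exc:
    print(exc)
```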
@@ -271,9 +291,7 @@ def tuplify(ver):
            flags = flags_fmt.format(**compiler_entry)
            return flags

-    msg = (
-        "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
-    )
+    msg = "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
    if compiler_info:
        versions = [x["versions"] for x in compiler_info]
        msg += f' [supported compiler versions are {", ".join(versions)}]'
@@ -289,9 +307,7 @@ def generic_microarchitecture(name):
    Args:
        name (str): name of the micro-architecture
    """
-    return Microarchitecture(
-        name, parents=[], vendor="generic", features=[], compilers={}
-    )
+    return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
@@ -345,9 +361,7 @@ def fill_target_from_dict(name, data, targets):
        compilers = values.get("compilers", {})
        generation = values.get("generation", 0)

-        targets[name] = Microarchitecture(
-            name, parents, vendor, features, compilers, generation
-        )
+        targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)

    known_targets = {}
    data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
@@ -368,7 +382,15 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures)


-class UnsupportedMicroarchitecture(ValueError):
+class ArchspecError(Exception):
+    """Base class for errors within archspec"""
+
+
+class UnsupportedMicroarchitecture(ArchspecError, ValueError):
    """Raised if a compiler version does not support optimization for a given
    micro-architecture.
    """
+
+
+class InvalidCompilerVersion(ArchspecError, ValueError):
+    """Raised when an invalid format is used for compiler versions in archspec."""
lib/spack/external/archspec/cpu/schema.py (vendored, 68 lines changed)

@@ -7,7 +7,9 @@
"""
import collections.abc
import json
-import os.path
+import os
+import pathlib
+from typing import Tuple
@@ -46,21 +48,65 @@ def __len__(self):
        return len(self.data)


-def _load_json_file(json_file):
-    json_dir = os.path.join(os.path.dirname(__file__), "..", "json", "cpu")
-    json_dir = os.path.abspath(json_dir)
-
-    def _factory():
-        filename = os.path.join(json_dir, json_file)
-        with open(filename, "r", encoding="utf-8") as file:
-            return json.load(file)
-
-    return _factory
+#: Environment variable that might point to a directory with a user defined JSON file
+DIR_FROM_ENVIRONMENT = "ARCHSPEC_CPU_DIR"
+
+#: Environment variable that might point to a directory with extensions to JSON files
+EXTENSION_DIR_FROM_ENVIRONMENT = "ARCHSPEC_EXTENSION_CPU_DIR"
+
+
+def _json_file(filename: str, allow_custom: bool = False) -> Tuple[pathlib.Path, pathlib.Path]:
+    """Given a filename, returns the absolute path for the main JSON file, and an
+    optional absolute path for an extension JSON file.
+
+    Args:
+        filename: filename for the JSON file
+        allow_custom: if True, allows overriding the location where the file resides
+    """
+    json_dir = pathlib.Path(__file__).parent / ".." / "json" / "cpu"
+    if allow_custom and DIR_FROM_ENVIRONMENT in os.environ:
+        json_dir = pathlib.Path(os.environ[DIR_FROM_ENVIRONMENT])
+    json_dir = json_dir.absolute()
+    json_file = json_dir / filename
+
+    extension_file = None
+    if allow_custom and EXTENSION_DIR_FROM_ENVIRONMENT in os.environ:
+        extension_dir = pathlib.Path(os.environ[EXTENSION_DIR_FROM_ENVIRONMENT])
+        extension_dir.absolute()
+        extension_file = extension_dir / filename
+
+    return json_file, extension_file
+
+
+def _load(json_file: pathlib.Path, extension_file: pathlib.Path):
+    with open(json_file, "r", encoding="utf-8") as file:
+        data = json.load(file)
+
+    if not extension_file or not extension_file.exists():
+        return data
+
+    with open(extension_file, "r", encoding="utf-8") as file:
+        extension_data = json.load(file)
+
+    top_level_sections = list(data.keys())
+    for key in top_level_sections:
+        if key not in extension_data:
+            continue
+
+        data[key].update(extension_data[key])
+
+    return data


#: In memory representation of the data in microarchitectures.json,
#: loaded on first access
-TARGETS_JSON = LazyDictionary(_load_json_file("microarchitectures.json"))
+TARGETS_JSON = LazyDictionary(_load, *_json_file("microarchitectures.json", allow_custom=True))

#: JSON schema for microarchitectures.json, loaded on first access
-SCHEMA = LazyDictionary(_load_json_file("microarchitectures_schema.json"))
+TARGETS_JSON_SCHEMA = LazyDictionary(_load, *_json_file("microarchitectures_schema.json"))
+
+#: Information on how to call 'cpuid' to get information on the HOST CPU
+CPUID_JSON = LazyDictionary(_load, *_json_file("cpuid.json", allow_custom=True))
+
+#: JSON schema for cpuid.json, loaded on first access
+CPUID_JSON_SCHEMA = LazyDictionary(_load, *_json_file("cpuid_schema.json"))
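The two environment variables give users a replacement directory and an overlay directory, respectively; the overlay merges top-level JSON sections over the shipped data. A sketch of exercising the override (the paths are hypothetical, and the variables must be set before the lazy dictionaries are first read):

```python
import os

# Hypothetical paths: ARCHSPEC_CPU_DIR replaces the JSON directory wholesale,
# ARCHSPEC_EXTENSION_CPU_DIR is merged section-by-section on top of it.
os.environ["ARCHSPEC_CPU_DIR"] = "/opt/site/archspec-json"
os.environ["ARCHSPEC_EXTENSION_CPU_DIR"] = "/opt/site/archspec-extra"

import archspec.cpu  # TARGETS_JSON is lazy, so the variables above take effect

print(archspec.cpu.host())
```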
lib/spack/external/archspec/json/README.md (vendored, 10 lines changed)

@@ -9,11 +9,11 @@ language specific APIs.

Currently the repository contains the following JSON files:
```console
-.
-├── COPYRIGHT
-└── cpu
+cpu/
+├── cpuid.json                       # Contains information on CPUID calls to retrieve vendor and features on x86_64
+├── cpuid_schema.json                # Schema for the file above
 ├── microarchitectures.json          # Contains information on CPU microarchitectures
 └── microarchitectures_schema.json   # Schema for the file above
```
lib/spack/external/archspec/json/cpu/cpuid.json (vendored, new file, 1050 lines)
File diff suppressed because it is too large.
lib/spack/external/archspec/json/cpu/cpuid_schema.json (vendored, new file, 134 lines)

{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Schema for microarchitecture definitions and feature aliases",
  "type": "object",
  "additionalProperties": false,
  "properties": {
    "vendor": {
      "type": "object",
      "additionalProperties": false,
      "properties": {
        "description": {"type": "string"},
        "input": {
          "type": "object",
          "additionalProperties": false,
          "properties": {
            "eax": {"type": "integer"},
            "ecx": {"type": "integer"}
          }
        }
      }
    },
    "highest_extension_support": {
      "type": "object",
      "additionalProperties": false,
      "properties": {
        "description": {"type": "string"},
        "input": {
          "type": "object",
          "additionalProperties": false,
          "properties": {
            "eax": {"type": "integer"},
            "ecx": {"type": "integer"}
          }
        }
      }
    },
    "flags": {
      "type": "array",
      "items": {
        "type": "object",
        "additionalProperties": false,
        "properties": {
          "description": {"type": "string"},
          "input": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
              "eax": {"type": "integer"},
              "ecx": {"type": "integer"}
            }
          },
          "bits": {
            "type": "array",
            "items": {
              "type": "object",
              "additionalProperties": false,
              "properties": {
                "name": {"type": "string"},
                "register": {"type": "string"},
                "bit": {"type": "integer"}
              }
            }
          }
        }
      }
    },
    "extension-flags": {
      "type": "array",
      "items": {
        "type": "object",
        "additionalProperties": false,
        "properties": {
          "description": {"type": "string"},
          "input": {
            "type": "object",
            "additionalProperties": false,
            "properties": {
              "eax": {"type": "integer"},
              "ecx": {"type": "integer"}
            }
          },
          "bits": {
            "type": "array",
            "items": {
              "type": "object",
              "additionalProperties": false,
              "properties": {
                "name": {"type": "string"},
                "register": {"type": "string"},
                "bit": {"type": "integer"}
              }
            }
          }
        }
      }
    }
  }
}
@@ -2937,8 +2937,6 @@
            "ilrcpc",
            "flagm",
            "ssbs",
-           "paca",
-           "pacg",
            "dcpodp",
            "svei8mm",
            "svebf16",
@@ -3066,8 +3064,6 @@
            "flagm",
            "ssbs",
            "sb",
-           "paca",
-           "pacg",
            "dcpodp",
            "sve2",
            "sveaes",
@@ -3081,8 +3077,7 @@
            "svebf16",
            "i8mm",
            "bf16",
-           "dgh",
-           "bti"
+           "dgh"
          ],
          "compilers" : {
            "gcc": [
lib/spack/external/archspec/vendor/cpuid/LICENSE (vendored, new file, 20 lines)

The MIT License (MIT)

Copyright (c) 2014 Anders Høst

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
lib/spack/external/archspec/vendor/cpuid/README.md (vendored, new file, 76 lines)

cpuid.py
========

Now, this is silly!

Pure Python library for accessing information about x86 processors
by querying the [CPUID](http://en.wikipedia.org/wiki/CPUID)
instruction. Well, not exactly pure Python...

It works by allocating a small piece of virtual memory, copying
a raw x86 function to that memory, giving the memory execute
permissions and then calling the memory as a function. The injected
function executes the CPUID instruction and copies the result back
to a ctypes.Structure where it can be read by Python.

It should work fine on both 32 and 64 bit versions of Windows and Linux
running x86 processors. Apple OS X and other BSD systems should also work,
not tested though...

Why?
----
For poops and giggles. Plus, having access to a low-level feature
without having to compile a C wrapper is pretty neat.

Examples
--------
Getting info with eax=0:

    import cpuid

    q = cpuid.CPUID()
    eax, ebx, ecx, edx = q(0)

Running the files:

    $ python example.py
    Vendor ID : GenuineIntel
    CPU name  : Intel(R) Xeon(R) CPU W3550 @ 3.07GHz

    Vector instructions supported:
    SSE    : Yes
    SSE2   : Yes
    SSE3   : Yes
    SSSE3  : Yes
    SSE4.1 : Yes
    SSE4.2 : Yes
    SSE4a  : --
    AVX    : --
    AVX2   : --

    $ python cpuid.py
    CPUID    A        B        C        D
    00000000 0000000b 756e6547 6c65746e 49656e69
    00000001 000106a5 00100800 009ce3bd bfebfbff
    00000002 55035a01 00f0b2e4 00000000 09ca212c
    00000003 00000000 00000000 00000000 00000000
    00000004 00000000 00000000 00000000 00000000
    00000005 00000040 00000040 00000003 00001120
    00000006 00000003 00000002 00000001 00000000
    00000007 00000000 00000000 00000000 00000000
    00000008 00000000 00000000 00000000 00000000
    00000009 00000000 00000000 00000000 00000000
    0000000a 07300403 00000044 00000000 00000603
    0000000b 00000000 00000000 00000095 00000000
    80000000 80000008 00000000 00000000 00000000
    80000001 00000000 00000000 00000001 28100800
    80000002 65746e49 2952286c 6f655820 2952286e
    80000003 55504320 20202020 20202020 57202020
    80000004 30353533 20402020 37302e33 007a4847
    80000005 00000000 00000000 00000000 00000000
    80000006 00000000 00000000 01006040 00000000
    80000007 00000000 00000000 00000000 00000100
    80000008 00003024 00000000 00000000 00000000
lib/spack/external/archspec/vendor/cpuid/cpuid.py (vendored, new file, 172 lines)

# -*- coding: utf-8 -*-
#
# Copyright (c) 2024 Anders Høst
#

from __future__ import print_function

import platform
import os
import ctypes
from ctypes import c_uint32, c_long, c_ulong, c_size_t, c_void_p, POINTER, CFUNCTYPE

# Posix x86_64:
# Three first call registers : RDI, RSI, RDX
# Volatile registers         : RAX, RCX, RDX, RSI, RDI, R8-11

# Windows x86_64:
# Three first call registers : RCX, RDX, R8
# Volatile registers         : RAX, RCX, RDX, R8-11

# cdecl 32 bit:
# Three first call registers : Stack (%esp)
# Volatile registers         : EAX, ECX, EDX

_POSIX_64_OPC = [
    0x53,                    # push %rbx
    0x89, 0xf0,              # mov %esi,%eax
    0x89, 0xd1,              # mov %edx,%ecx
    0x0f, 0xa2,              # cpuid
    0x89, 0x07,              # mov %eax,(%rdi)
    0x89, 0x5f, 0x04,        # mov %ebx,0x4(%rdi)
    0x89, 0x4f, 0x08,        # mov %ecx,0x8(%rdi)
    0x89, 0x57, 0x0c,        # mov %edx,0xc(%rdi)
    0x5b,                    # pop %rbx
    0xc3                     # retq
]

_WINDOWS_64_OPC = [
    0x53,                    # push %rbx
    0x89, 0xd0,              # mov %edx,%eax
    0x49, 0x89, 0xc9,        # mov %rcx,%r9
    0x44, 0x89, 0xc1,        # mov %r8d,%ecx
    0x0f, 0xa2,              # cpuid
    0x41, 0x89, 0x01,        # mov %eax,(%r9)
    0x41, 0x89, 0x59, 0x04,  # mov %ebx,0x4(%r9)
    0x41, 0x89, 0x49, 0x08,  # mov %ecx,0x8(%r9)
    0x41, 0x89, 0x51, 0x0c,  # mov %edx,0xc(%r9)
    0x5b,                    # pop %rbx
    0xc3                     # retq
]

_CDECL_32_OPC = [
    0x53,                    # push %ebx
    0x57,                    # push %edi
    0x8b, 0x7c, 0x24, 0x0c,  # mov 0xc(%esp),%edi
    0x8b, 0x44, 0x24, 0x10,  # mov 0x10(%esp),%eax
    0x8b, 0x4c, 0x24, 0x14,  # mov 0x14(%esp),%ecx
    0x0f, 0xa2,              # cpuid
    0x89, 0x07,              # mov %eax,(%edi)
    0x89, 0x5f, 0x04,        # mov %ebx,0x4(%edi)
    0x89, 0x4f, 0x08,        # mov %ecx,0x8(%edi)
    0x89, 0x57, 0x0c,        # mov %edx,0xc(%edi)
    0x5f,                    # pop %edi
    0x5b,                    # pop %ebx
    0xc3                     # ret
]

is_windows = os.name == "nt"
is_64bit = ctypes.sizeof(ctypes.c_voidp) == 8


class CPUID_struct(ctypes.Structure):
    _register_names = ("eax", "ebx", "ecx", "edx")
    _fields_ = [(r, c_uint32) for r in _register_names]

    def __getitem__(self, item):
        if item not in self._register_names:
            raise KeyError(item)
        return getattr(self, item)

    def __repr__(self):
        return "eax=0x{:x}, ebx=0x{:x}, ecx=0x{:x}, edx=0x{:x}".format(
            self.eax, self.ebx, self.ecx, self.edx
        )


class CPUID(object):
    def __init__(self):
        if platform.machine() not in ("AMD64", "x86_64", "x86", "i686"):
            raise SystemError("Only available for x86")

        if is_windows:
            if is_64bit:
                # VirtualAlloc seems to fail under some weird
                # circumstances when ctypes.windll.kernel32 is
                # used under 64 bit Python. CDLL fixes this.
                self.win = ctypes.CDLL("kernel32.dll")
                opc = _WINDOWS_64_OPC
            else:
                # Here ctypes.windll.kernel32 is needed to get the
                # right DLL. Otherwise it will fail when running
                # 32 bit Python on 64 bit Windows.
                self.win = ctypes.windll.kernel32
                opc = _CDECL_32_OPC
        else:
            opc = _POSIX_64_OPC if is_64bit else _CDECL_32_OPC

        size = len(opc)
        code = (ctypes.c_ubyte * size)(*opc)

        if is_windows:
            self.win.VirtualAlloc.restype = c_void_p
            self.win.VirtualAlloc.argtypes = [
                ctypes.c_void_p,
                ctypes.c_size_t,
                ctypes.c_ulong,
                ctypes.c_ulong,
            ]
            self.addr = self.win.VirtualAlloc(None, size, 0x1000, 0x40)
            if not self.addr:
                raise MemoryError("Could not allocate RWX memory")
            ctypes.memmove(self.addr, code, size)
        else:
            from mmap import (
                mmap,
                MAP_PRIVATE,
                MAP_ANONYMOUS,
                PROT_WRITE,
                PROT_READ,
                PROT_EXEC,
            )

            self.mm = mmap(
                -1,
                size,
                flags=MAP_PRIVATE | MAP_ANONYMOUS,
                prot=PROT_WRITE | PROT_READ | PROT_EXEC,
            )
            self.mm.write(code)
            self.addr = ctypes.addressof(ctypes.c_int.from_buffer(self.mm))

        func_type = CFUNCTYPE(None, POINTER(CPUID_struct), c_uint32, c_uint32)
        self.func_ptr = func_type(self.addr)

    def __call__(self, eax, ecx=0):
        struct = self.registers_for(eax=eax, ecx=ecx)
        return struct.eax, struct.ebx, struct.ecx, struct.edx

    def registers_for(self, eax, ecx=0):
        """Calls cpuid with eax and ecx set as the input arguments, and returns a structure
        containing eax, ebx, ecx, and edx.
        """
        struct = CPUID_struct()
        self.func_ptr(struct, eax, ecx)
        return struct

    def __del__(self):
        if is_windows:
            self.win.VirtualFree.restype = c_long
            self.win.VirtualFree.argtypes = [c_void_p, c_size_t, c_ulong]
            self.win.VirtualFree(self.addr, 0, 0x8000)
        else:
            self.mm.close()


if __name__ == "__main__":
    def valid_inputs():
        cpuid = CPUID()
        for eax in (0x0, 0x80000000):
            highest, _, _, _ = cpuid(eax)
            while eax <= highest:
                regs = cpuid(eax)
                yield (eax, regs)
                eax += 1

    print(" ".join(x.ljust(8) for x in ("CPUID", "A", "B", "C", "D")).strip())
    for eax, regs in valid_inputs():
        print("%08x" % eax, " ".join("%08x" % reg for reg in regs))
lib/spack/external/archspec/vendor/cpuid/example.py (vendored, new file, 62 lines)

# -*- coding: utf-8 -*-
#
# Copyright (c) 2024 Anders Høst
#

from __future__ import print_function

import struct
import cpuid


def cpu_vendor(cpu):
    _, b, c, d = cpu(0)
    return struct.pack("III", b, d, c).decode("utf-8")


def cpu_name(cpu):
    name = "".join((struct.pack("IIII", *cpu(0x80000000 + i)).decode("utf-8")
                    for i in range(2, 5)))
    return name.split('\x00', 1)[0]


def is_set(cpu, leaf, subleaf, reg_idx, bit):
    """
    @param {leaf} %eax
    @param {subleaf} %ecx, 0 in most cases
    @param {reg_idx} idx of [%eax, %ebx, %ecx, %edx], 0-based
    @param {bit} bit of reg selected by {reg_idx}, 0-based
    """
    regs = cpu(leaf, subleaf)

    if (1 << bit) & regs[reg_idx]:
        return "Yes"
    else:
        return "--"


if __name__ == "__main__":
    cpu = cpuid.CPUID()

    print("Vendor ID : %s" % cpu_vendor(cpu))
    print("CPU name  : %s" % cpu_name(cpu))
    print()
    print("Vector instructions supported:")
    print("SSE    : %s" % is_set(cpu, 1, 0, 3, 25))
    print("SSE2   : %s" % is_set(cpu, 1, 0, 3, 26))
    print("SSE3   : %s" % is_set(cpu, 1, 0, 2, 0))
    print("SSSE3  : %s" % is_set(cpu, 1, 0, 2, 9))
    print("SSE4.1 : %s" % is_set(cpu, 1, 0, 2, 19))
    print("SSE4.2 : %s" % is_set(cpu, 1, 0, 2, 20))
    print("SSE4a  : %s" % is_set(cpu, 0x80000001, 0, 2, 6))
    print("AVX    : %s" % is_set(cpu, 1, 0, 2, 28))
    print("AVX2   : %s" % is_set(cpu, 7, 0, 1, 5))
    print("BMI1   : %s" % is_set(cpu, 7, 0, 1, 3))
    print("BMI2   : %s" % is_set(cpu, 7, 0, 1, 8))
    # Intel RDT CMT/MBM
    print("L3 Monitoring : %s" % is_set(cpu, 0xf, 0, 3, 1))
    print("L3 Occupancy  : %s" % is_set(cpu, 0xf, 1, 3, 0))
    print("L3 Total BW   : %s" % is_set(cpu, 0xf, 1, 3, 1))
    print("L3 Local BW   : %s" % is_set(cpu, 0xf, 1, 3, 2))
lib/spack/external/patches/ruamelyaml.patch (vendored, new file, 13 lines)

diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                   Tag.attrib, merge_attrib]:
         if hasattr(self, a):
             if memo is not None:
-                setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                setattr(t, a, copy.deepcopy(getattr(self, a), memo))
             else:
                 setattr(t, a, getattr(self, a))
 # fmt: on
@@ -42,11 +42,6 @@ def convert_to_posix_path(path: str) -> str:
    return format_os_path(path, mode=Path.unix)


-def convert_to_windows_path(path: str) -> str:
-    """Converts the input path to Windows style."""
-    return format_os_path(path, mode=Path.windows)
-
-
def convert_to_platform_path(path: str) -> str:
    """Converts the input path to the current platform's native style."""
    return format_os_path(path, mode=Path.platform_path)
@@ -103,3 +98,10 @@ def path_filter_caller(*args, **kwargs):
    if _func:
        return holder_func(_func)
    return holder_func
+
+
+def sanitize_win_longpath(path: str) -> str:
+    """Strip the Windows extended path prefix from strings.
+    Returns the sanitized string; no-op if the extended path prefix is not present."""
+    return path.lstrip("\\\\?\\")
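What the new helper strips, sketched with a hypothetical path (note that str.lstrip removes leading characters from the set `\` and `?`, which covers the `\\?\` prefix):

```python
# Standalone sketch of sanitize_win_longpath from the hunk above.
def sanitize_win_longpath(path: str) -> str:
    return path.lstrip("\\\\?\\")

print(sanitize_win_longpath(r"\\?\C:\Users\spack\stage"))  # C:\Users\spack\stage
```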
@@ -12,7 +12,7 @@
# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
-NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
+NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")

# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
@@ -357,10 +357,8 @@ def strip_version_suffixes(path_or_url: str) -> str:
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)?",
-        # PyPI
-        r"[._-]py[23].*\.whl",
-        r"[._-]cp[23].*\.whl",
-        r"[._-]win.*\.exe",
+        # PyPI wheels
+        r"-(?:py|cp)[23].*",
    ]

    for regex in suffix_regexes:
@@ -403,7 +401,7 @@ def expand_contracted_extension_in_path(
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
    """Returns compression extension for a compressed archive"""
    extension = expand_contracted_extension(extension)
-    for ext in [*EXTENSIONS]:
+    for ext in EXTENSIONS:
        if ext in extension:
            return ext
    return None
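The consolidated wheel regex above matches the `-pyX`/`-cpX` tag and everything after it, so an entire platform tag disappears in one substitution. A sketch on a made-up wheel stem:

```python
import re

# Hypothetical wheel filename stem, already stripped of its ".whl" extension.
stem = "example_pkg-4.25.3-cp39-abi3-win_amd64"
print(re.sub(r"-(?:py|cp)[23].*", "", stem))  # example_pkg-4.25.3
```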
@@ -187,26 +187,58 @@ def polite_filename(filename: str) -> str:
    return _polite_antipattern().sub("_", filename)


-def getuid():
+def getuid() -> Union[str, int]:
+    """Returns os getuid on non Windows.
+    On Windows returns 0 for admin users, login string otherwise.
+    This is in line with behavior from get_owner_uid which
+    always returns the login string on Windows.
+    """
    if sys.platform == "win32":
        import ctypes

+        # If not admin, use the string name of the login as a unique ID
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
-            return 1
+            return os.getlogin()
        return 0
    else:
        return os.getuid()


+def _win_rename(src, dst):
+    # os.replace will still fail on Windows (but not POSIX) if dst
+    # is a symlink to a directory (all other cases have parity Windows <-> POSIX)
+    if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
+        if os.path.samefile(src, dst):
+            # src and dst are the same: do nothing and exit early
+            return
+        # If dst exists and is a symlink to a directory,
+        # remove dst and then perform the rename/replace;
+        # this is safe to do as there's no chance src == dst now
+        os.remove(dst)
+    os.replace(src, dst)
+
+
+@system_path_filter
+def msdos_escape_parens(path):
+    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
+    if sys.platform == "win32":
+        return path.replace("(", "^(").replace(")", "^)")
+    else:
+        return path
+
+
@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists.
+    # os.replace is the same as os.rename on POSIX and is MoveFileExW w/
+    # the MOVEFILE_REPLACE_EXISTING flag on Windows.
+    # The Windows invocation is abstracted behind additional logic handling
+    # remaining cases of divergent behavior across platforms.
    if sys.platform == "win32":
-        # Windows path existence checks will sometimes fail on junctions/links/symlinks
-        # so check for that case
-        if os.path.exists(dst) or islink(dst):
-            os.remove(dst)
-    os.rename(src, dst)
+        _win_rename(src, dst)
+    else:
+        os.replace(src, dst)


@system_path_filter
@@ -237,16 +269,6 @@ def _get_mime_type():
    return file_command("-b", "-h", "--mime-type")


-@memoized
-def _get_mime_type_compressed():
-    """Same as _get_mime_type but attempts to check for
-    compression first
-    """
-    mime_uncompressed = _get_mime_type()
-    mime_uncompressed.add_default_arg("-Z")
-    return mime_uncompressed
-
-
def mime_type(filename):
    """Returns the mime type and subtype of a file.

@@ -262,21 +284,6 @@ def mime_type(filename):
    return type, subtype


-def compressed_mime_type(filename):
-    """Same as mime_type but checks for type that has been compressed
-
-    Args:
-        filename (str): file to be analyzed
-
-    Returns:
-        Tuple containing the MIME type and subtype
-    """
-    output = _get_mime_type_compressed()(filename, output=str, error=str).strip()
-    tty.debug("==> " + output)
-    type, _, subtype = output.partition("/")
-    return type, subtype
-
-
#: This generates the library filenames that may appear on any OS.
library_extensions = ["a", "la", "so", "tbd", "dylib"]
@@ -308,13 +315,6 @@ def paths_containing_libs(paths, library_names):
     return rpaths_to_include


-@system_path_filter
-def same_path(path1, path2):
-    norm1 = os.path.abspath(path1).rstrip(os.path.sep)
-    norm2 = os.path.abspath(path2).rstrip(os.path.sep)
-    return norm1 == norm2
-
-
 def filter_file(
     regex: str,
     repl: Union[str, Callable[[Match], str]],
@@ -568,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):
 @system_path_filter(arg_slice=slice(1))
-def get_owner_uid(path, err_msg=None):
+def get_owner_uid(path, err_msg=None) -> Union[str, int]:
+    """Returns owner UID of path destination
+
+    On non Windows this is the value of st_uid
+    On Windows this is the login string associated with the
+    owning user.
+    """
     if not os.path.exists(path):
         mkdirp(path, mode=stat.S_IRWXU)
@@ -909,17 +915,6 @@ def is_exe(path):
     return os.path.isfile(path) and os.access(path, os.X_OK)


-@system_path_filter
-def get_filetype(path_name):
-    """
-    Return the output of file path_name as a string to identify file type.
-    """
-    file = Executable("file")
-    file.add_default_env("LC_ALL", "C")
-    output = file("-b", "-h", "%s" % path_name, output=str, error=str)
-    return output.strip()
-
-
 def has_shebang(path):
     """Returns whether a path has a shebang line. Returns False if the file cannot be opened."""
     try:
@@ -1169,20 +1164,6 @@ def write_tmp_and_move(filename):
     shutil.move(tmp, filename)


-@contextmanager
-@system_path_filter
-def open_if_filename(str_or_file, mode="r"):
-    """Takes either a path or a file object, and opens it if it is a path.
-
-    If it's a file object, just yields the file object.
-    """
-    if isinstance(str_or_file, str):
-        with open(str_or_file, mode) as f:
-            yield f
-    else:
-        yield str_or_file
-
-
 @system_path_filter
 def touch(path):
     """Creates an empty file at the specified path."""
@@ -1274,10 +1255,12 @@ def windows_sfn(path: os.PathLike):
     import ctypes

     k32 = ctypes.WinDLL("kernel32", use_last_error=True)
+    # Method with null values returns size of short path name
+    sz = k32.GetShortPathNameW(path, None, 0)
     # stub Windows types TCHAR[LENGTH]
-    TCHAR_arr = ctypes.c_wchar * len(path)
+    TCHAR_arr = ctypes.c_wchar * sz
     ret_str = TCHAR_arr()
-    k32.GetShortPathNameW(path, ret_str, len(path))
+    k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
     return ret_str.value
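The fix replaces `len(path)` with the size reported by the API itself: a first `GetShortPathNameW` call with a NULL buffer returns the required buffer length, which the original guess could under-allocate. A sketch of that "ask, then allocate" Win32 pattern (Windows-only, guarded here; `short_path_name` is an illustrative name):

```python
import ctypes
import sys

def short_path_name(path: str) -> str:
    if sys.platform != "win32":
        return path  # 8.3 short names only exist on Windows
    k32 = ctypes.WinDLL("kernel32", use_last_error=True)
    size = k32.GetShortPathNameW(path, None, 0)  # ask for the required size
    buf = (ctypes.c_wchar * size)()              # allocate exactly that much
    k32.GetShortPathNameW(path, buf, size)
    return buf.value
```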
@@ -1295,19 +1278,6 @@ def temp_cwd():
     shutil.rmtree(tmp_dir, **kwargs)


-@contextmanager
-@system_path_filter
-def temp_rename(orig_path, temp_path):
-    same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
-    if not same_path:
-        shutil.move(orig_path, temp_path)
-    try:
-        yield
-    finally:
-        if not same_path:
-            shutil.move(temp_path, orig_path)
-
-
 @system_path_filter
 def can_access(file_name):
     """True if we have read/write access to the file."""
@@ -2480,9 +2450,10 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                self._additional_library_dependents.add(pathlib.Path(pth).parent)
+                new_pth = pathlib.Path(pth).parent
             else:
-                self._additional_library_dependents.add(pathlib.Path(pth))
+                new_pth = pathlib.Path(pth)
+            self._additional_library_dependents.add(new_pth)

     @property
     def rpaths(self):
@@ -98,36 +98,6 @@ def caller_locals():
     del stack


-def get_calling_module_name():
-    """Make sure that the caller is a class definition, and return the
-    enclosing module's name.
-    """
-    # Passing zero here skips line context for speed.
-    stack = inspect.stack(0)
-    try:
-        # Make sure locals contain __module__
-        caller_locals = stack[2][0].f_locals
-    finally:
-        del stack
-
-    if "__module__" not in caller_locals:
-        raise RuntimeError(
-            "Must invoke get_calling_module_name() " "from inside a class definition!"
-        )
-
-    module_name = caller_locals["__module__"]
-    base_name = module_name.split(".")[-1]
-    return base_name
-
-
-def attr_required(obj, attr_name):
-    """Ensure that a class has a required attribute."""
-    if not hasattr(obj, attr_name):
-        raise RequiredAttributeError(
-            "No required attribute '%s' in class '%s'" % (attr_name, obj.__class__.__name__)
-        )
-
-
 def attr_setdefault(obj, name, value):
     """Like dict.setdefault, but for objects."""
     if not hasattr(obj, name):
@@ -513,42 +483,6 @@ def copy(self):
     return clone


-def in_function(function_name):
-    """True if the caller was called from some function with
-    the supplied Name, False otherwise."""
-    stack = inspect.stack()
-    try:
-        for elt in stack[2:]:
-            if elt[3] == function_name:
-                return True
-        return False
-    finally:
-        del stack
-
-
-def check_kwargs(kwargs, fun):
-    """Helper for making functions with kwargs. Checks whether the kwargs
-    are empty after all of them have been popped off. If they're
-    not, raises an error describing which kwargs are invalid.
-
-    Example::
-
-        def foo(self, **kwargs):
-            x = kwargs.pop('x', None)
-            y = kwargs.pop('y', None)
-            z = kwargs.pop('z', None)
-            check_kwargs(kwargs, self.foo)
-
-        # This raises a TypeError:
-        foo(w='bad kwarg')
-    """
-    if kwargs:
-        raise TypeError(
-            "'%s' is an invalid keyword argument for function %s()."
-            % (next(iter(kwargs)), fun.__name__)
-        )
-
-
 def match_predicate(*args):
     """Utility function for making string matching predicates.
@@ -764,11 +698,6 @@ def pretty_seconds(seconds):
     return pretty_seconds_formatter(seconds)(seconds)


-class RequiredAttributeError(ValueError):
-    def __init__(self, message):
-        super().__init__(message)
-
-
 class ObjectWrapper:
     """Base class that wraps an object. Derived classes can add new behavior
     while staying undercover.
@@ -843,6 +772,30 @@ def __repr__(self):
     return repr(self.instance)


+def get_entry_points(*, group: str):
+    """Wrapper for ``importlib.metadata.entry_points``
+
+    Args:
+        group: entry points to select
+
+    Returns:
+        EntryPoints for ``group`` or empty list if unsupported
+    """
+
+    try:
+        import importlib.metadata  # type: ignore  # novermin
+    except ImportError:
+        return []
+
+    try:
+        return importlib.metadata.entry_points(group=group)
+    except TypeError:
+        # Prior to Python 3.10, entry_points accepted no parameters and always
+        # returned a dictionary of entry points, keyed by group. See
+        # https://docs.python.org/3/library/importlib.metadata.html#entry-points
+        return importlib.metadata.entry_points().get(group, [])
+
+
 def load_module_from_file(module_name, module_path):
     """Loads a python module from the path of the corresponding file.
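A hedged usage sketch for the new wrapper; the group name below is illustrative, not something this diff defines:

```python
# Iterate plugins registered under a hypothetical entry-point group. The
# wrapper hides the pre-3.10 dict-based importlib.metadata API, so this
# loop is identical on every supported Python version.
for entry_point in get_entry_points(group="example.plugins"):
    plugin = entry_point.load()  # import the object the entry point names
    print(entry_point.name, plugin)
```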
@@ -911,25 +864,6 @@ def uniq(sequence):
     return uniq_list


-def star(func):
-    """Unpacks arguments for use with Multiprocessing mapping functions"""
-
-    def _wrapper(args):
-        return func(*args)
-
-    return _wrapper
-
-
-class Devnull:
-    """Null stream with less overhead than ``os.devnull``.
-
-    See https://stackoverflow.com/a/2929954.
-    """
-
-    def write(self, *_):
-        pass
-
-
 def elide_list(line_list, max_num=10):
     """Takes a long list and limits it to a smaller number of elements,
     replacing intervening elements with '...'. For example::
@@ -815,10 +815,6 @@ def __init__(self, path):
     super().__init__(msg)


-class LockLimitError(LockError):
-    """Raised when exceed maximum attempts to acquire a lock."""
-
-
 class LockTimeoutError(LockError):
     """Raised when an attempt to acquire a lock times out."""
@@ -11,7 +11,7 @@
 from llnl.util import lang, tty

-from ..path import system_path_filter
+from ..path import sanitize_win_longpath, system_path_filter

 if sys.platform == "win32":
     from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
     out, err = proc.communicate()
     tty.debug(out.decode())
     if proc.returncode != 0:
-        err = err.decode()
-        tty.error(err)
-        raise SymlinkError("Make junction command returned a non-zero return code.", err)
+        err_str = err.decode()
+        tty.error(err_str)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)


 def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)


-def readlink(path: str):
+def readlink(path: str, *, dir_fd=None):
     """Spack utility overriding os.readlink to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
     elif _windows_is_junction(path):
         return _windows_read_junction(path)
     else:
-        return os.readlink(path)
+        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))


 def _windows_read_hard_link(link: str) -> str:
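Assuming `sanitize_win_longpath` strips the Windows extended-length prefix (an inference from its name and use here, not shown in this diff), the post-processing amounts to:

```python
def strip_win_extended_prefix(path: str) -> str:
    # "\\?\C:\spack" -> "C:\spack"; readlink on Windows may return the
    # extended-length form, which the rest of the code does not expect.
    return path[4:] if path.startswith("\\\\?\\") else path

assert strip_win_extended_prefix("\\\\?\\C:\\spack") == "C:\\spack"
```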
@@ -12,7 +12,7 @@
 import traceback
 from datetime import datetime
 from sys import platform as _platform
-from typing import NoReturn
+from typing import Any, NoReturn

 if _platform != "win32":
     import fcntl
@@ -44,10 +44,6 @@ def is_debug(level=1):
     return _debug >= level


-def is_stacktrace():
-    return _stacktrace
-
-
 def set_debug(level=0):
     global _debug
     assert level >= 0, "Debug level must be a positive value"
@@ -162,21 +158,22 @@ def get_timestamp(force=False):
     return ""


-def msg(message, *args, **kwargs):
+def msg(message: Any, *args: Any, newline: bool = True) -> None:
     if not msg_enabled():
         return

     if isinstance(message, Exception):
-        message = "%s: %s" % (message.__class__.__name__, str(message))
+        message = f"{message.__class__.__name__}: {message}"
+    else:
+        message = str(message)

-    newline = kwargs.get("newline", True)
     st_text = ""
     if _stacktrace:
         st_text = process_stacktrace(2)
-    if newline:
-        cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
-    else:
-        cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
+    nl = "\n" if newline else ""
+    cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")
     for arg in args:
         print(indent + _output_filter(str(arg)))
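The f-string rewrite needs the doubled braces: in an f-string, `{{` and `}}` are literal braces, so three in a row produce one literal brace plus an interpolation. A quick check:

```python
st_text = ""
# "@*b{...}" is color markup; only {st_text} is interpolated.
assert f"@*b{{{st_text}==>}}" == "@*b{==>}"
```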
@@ -252,37 +249,6 @@ def die(message, *args, **kwargs) -> NoReturn:
     sys.exit(1)


-def get_number(prompt, **kwargs):
-    default = kwargs.get("default", None)
-    abort = kwargs.get("abort", None)
-
-    if default is not None and abort is not None:
-        prompt += " (default is %s, %s to abort) " % (default, abort)
-    elif default is not None:
-        prompt += " (default is %s) " % default
-    elif abort is not None:
-        prompt += " (%s to abort) " % abort
-
-    number = None
-    while number is None:
-        msg(prompt, newline=False)
-        ans = input()
-        if ans == str(abort):
-            return None
-
-        if ans:
-            try:
-                number = int(ans)
-                if number < 1:
-                    msg("Please enter a valid number.")
-                    number = None
-            except ValueError:
-                msg("Please enter a valid number.")
-        elif default is not None:
-            number = default
-    return number
-
-
 def get_yes_or_no(prompt, **kwargs):
     default_value = kwargs.get("default", None)
@@ -237,7 +237,6 @@ def transpose():
 def colified(
     elts: List[Any],
     cols: int = 0,
-    output: Optional[IO] = None,
     indent: int = 0,
     padding: int = 2,
     tty: Optional[bool] = None,
@@ -59,9 +59,11 @@
 To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
+import os
 import re
 import sys
 from contextlib import contextmanager
+from typing import Optional


 class ColorParseError(Exception):
@@ -95,14 +97,34 @@ def __init__(self, message):
 }  # white

 # Regex to be used for color formatting
-color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
+COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")

 # Mapping from color arguments to values for tty.set_color
 color_when_values = {"always": True, "auto": None, "never": False}

-# Force color; None: Only color if stdout is a tty
-# True: Always colorize output, False: Never colorize output
-_force_color = None
+def _color_when_value(when):
+    """Raise a ValueError for an invalid color setting.
+
+    Valid values are 'always', 'never', and 'auto', or equivalently,
+    True, False, and None.
+    """
+    if when in color_when_values:
+        return color_when_values[when]
+    elif when not in color_when_values.values():
+        raise ValueError("Invalid color setting: %s" % when)
+    return when
+
+
+def _color_from_environ() -> Optional[bool]:
+    try:
+        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
+    except ValueError:
+        return None
+
+
+#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
+_force_color = _color_from_environ()


 def try_enable_terminal_color_on_windows():
@@ -163,19 +185,6 @@ def _err_check(result, func, args):
     debug("Unable to support color on Windows terminal")


-def _color_when_value(when):
-    """Raise a ValueError for an invalid color setting.
-
-    Valid values are 'always', 'never', and 'auto', or equivalently,
-    True, False, and None.
-    """
-    if when in color_when_values:
-        return color_when_values[when]
-    elif when not in color_when_values.values():
-        raise ValueError("Invalid color setting: %s" % when)
-    return when
-
-
 def get_color_when():
     """Return whether commands should print color or not."""
     if _force_color is not None:
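With `_force_color` now seeded from `SPACK_COLOR` at import time, the mapping is the one `_color_when_value` enforces:

```python
assert _color_when_value("always") is True   # always colorize
assert _color_when_value("never") is False   # never colorize
assert _color_when_value("auto") is None     # colorize only when stdout is a tty
```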
@@ -203,77 +212,64 @@ def color_when(value):
     set_color_when(old_value)


-class match_to_ansi:
-    def __init__(self, color=True, enclose=False, zsh=False):
-        self.color = _color_when_value(color)
-        self.enclose = enclose
-        self.zsh = zsh
-
-    def escape(self, s):
-        """Returns a TTY escape sequence for a color"""
-        if self.color:
-            if self.zsh:
-                result = rf"\e[0;{s}m"
-            else:
-                result = f"\033[{s}m"
-
-            if self.enclose:
-                result = rf"\[{result}\]"
-
-            return result
-        else:
-            return ""
-
-    def __call__(self, match):
-        """Convert a match object generated by ``color_re`` into an ansi
-        color code. This can be used as a handler in ``re.sub``.
-        """
-        style, color, text = match.groups()
-        m = match.group(0)
-
-        if m == "@@":
-            return "@"
-        elif m == "@.":
-            return self.escape(0)
-        elif m == "@":
-            raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
-
-        string = styles[style]
-        if color:
-            if color not in colors:
-                raise ColorParseError(
-                    "Invalid color specifier: '%s' in '%s'" % (color, match.string)
-                )
-            string += ";" + str(colors[color])
-
-        colored_text = ""
-        if text:
-            colored_text = text + self.escape(0)
-
-        return self.escape(string) + colored_text
-
-
-def colorize(string, **kwargs):
+def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
+    """Returns a TTY escape sequence for a color"""
+    if color:
+        if zsh:
+            result = rf"\e[0;{s}m"
+        else:
+            result = f"\033[{s}m"
+
+        if enclose:
+            result = rf"\[{result}\]"
+
+        return result
+    else:
+        return ""
+
+
+def colorize(
+    string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
+) -> str:
     """Replace all color expressions in a string with ANSI control codes.

     Args:
-        string (str): The string to replace
+        string: The string to replace

     Returns:
-        str: The filtered string
+        The filtered string

     Keyword Arguments:
-        color (bool): If False, output will be plain text without control
-            codes, for output to non-console devices.
-        enclose (bool): If True, enclose ansi color sequences with
+        color: If False, output will be plain text without control codes, for output to
+            non-console devices (default: automatically choose color or not)
+        enclose: If True, enclose ansi color sequences with
             square brackets to prevent misestimation of terminal width.
-        zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
+        zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
     """
-    color = _color_when_value(kwargs.get("color", get_color_when()))
-    zsh = kwargs.get("zsh", False)
-    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
-    string = string.replace("}}", "}")
-    return string
+    color = color if color is not None else get_color_when()
+
+    def match_to_ansi(match):
+        """Convert a match object generated by ``COLOR_RE`` into an ansi
+        color code. This can be used as a handler in ``re.sub``.
+        """
+        escaped_at, dot, style, color_code, text = match.groups()
+
+        if escaped_at:
+            return "@"
+        elif dot:
+            return _escape(0, color, enclose, zsh)
+        elif not (style or color_code):
+            raise ColorParseError(
+                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
+            )
+
+        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
+        if text:
+            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
+        else:
+            return ansi_code
+
+    return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")


 def clen(string):
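Usage under the new signature; explicit keywords replace the old `**kwargs`:

```python
print(colorize("@*b{==>} @g{installed} example"))               # color decided by context
print(colorize("@*b{==>} @g{installed} example", color=False))  # force plain text
```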
@@ -305,7 +301,7 @@ def cprint(string, stream=None, color=None):
     cwrite(string + "\n", stream, color)


-def cescape(string):
+def cescape(string: str) -> str:
     """Escapes special characters needed for color codes.

     Replaces the following symbols with their equivalent literal forms:
@@ -321,10 +317,7 @@ def cescape(string):
     Returns:
         (str): the string with color codes escaped
     """
-    string = str(string)
-    string = string.replace("@", "@@")
-    string = string.replace("}", "}}")
-    return string
+    return string.replace("@", "@@").replace("}", "}}")


 class ColorStream:
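A round-trip sketch showing why the one-liner is equivalent: `cescape` doubles the markup characters and `colorize` folds them back:

```python
raw = "user@host {home}"
escaped = cescape(raw)  # -> "user@@host {home}}"
assert colorize(escaped, color=False) == raw
```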
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
 @config_packages
 def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
-    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
     group="externals",
     tag="PKG-EXTERNALS",
     description="Sanity checks for external software detection",
-    kwargs=("pkgs",),
+    kwargs=("pkgs", "debug_log"),
 )
@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():

 @external_detection
-def _test_detection_by_executable(pkgs, error_cls):
+def _test_detection_by_executable(pkgs, debug_log, error_cls):
     """Test drive external detection for packages"""
     import spack.detection
@@ -1095,6 +1095,7 @@ def _test_detection_by_executable(pkgs, error_cls):
         for idx, test_runner in enumerate(
             spack.detection.detection_tests(pkg_name, spack.repo.PATH)
         ):
+            debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
             specs = test_runner.execute()
             expected_specs = test_runner.expected_specs
@@ -1111,4 +1112,75 @@ def _test_detection_by_executable(pkgs, error_cls):
                 details = [msg.format(s, idx) for s in sorted(not_expected)]
                 errors.append(error_cls(summary=summary, details=details))

+            matched_detection = []
+            for candidate in expected_specs:
+                try:
+                    idx = specs.index(candidate)
+                    matched_detection.append((candidate, specs[idx]))
+                except (AttributeError, ValueError):
+                    pass
+
+            def _compare_extra_attribute(_expected, _detected, *, _spec):
+                result = []
+                # Check items are of the same type
+                if not isinstance(_detected, type(_expected)):
+                    _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+                    _details = [f"{_detected} was detected instead"]
+                    return [error_cls(summary=_summary, details=_details)]
+
+                # If they are string expected is a regex
+                if isinstance(_expected, str):
+                    try:
+                        _regex = re.compile(_expected)
+                    except re.error:
+                        _summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
+                        _details = [f"{_expected} is not a valid regex"]
+                        return [error_cls(summary=_summary, details=_details)]
+
+                    if not _regex.match(_detected):
+                        _summary = (
+                            f'{pkg_name}: error when trying to match "{_expected}" '
+                            f"in extra attributes"
+                        )
+                        _details = [f"{_detected} does not match the regex"]
+                        return [error_cls(summary=_summary, details=_details)]
+
+                if isinstance(_expected, dict):
+                    _not_detected = set(_expected.keys()) - set(_detected.keys())
+                    if _not_detected:
+                        _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
+                        _details = [
+                            f'"{_expected}" was expected',
+                            f'"{_detected}" was detected',
+                        ] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
+                        result.append(error_cls(summary=_summary, details=_details))
+
+                    _common = set(_expected.keys()) & set(_detected.keys())
+                    for _key in _common:
+                        result.extend(
+                            _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
+                        )
+
+                return result
+
+            for expected, detected in matched_detection:
+                # We might not want to test all attributes, so avoid not_expected
+                not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
+                if not_detected:
+                    summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
+                    details = [
+                        f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
+                    ]
+                    errors.append(error_cls(summary=summary, details=details))
+
+                common = set(expected.extra_attributes) & set(detected.extra_attributes)
+                for key in common:
+                    errors.extend(
+                        _compare_extra_attribute(
+                            expected.extra_attributes[key],
+                            detected.extra_attributes[key],
+                            _spec=expected,
+                        )
+                    )
+
     return errors
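A standalone sketch of the comparison rule the new audit code applies (simplified; the `compare` helper is hypothetical): strings in the expected `extra_attributes` are treated as regexes, and dicts are compared key by key, recursively:

```python
import re

def compare(expected, detected):
    if not isinstance(detected, type(expected)):
        return [f"{detected!r} was detected instead of {expected!r}"]
    if isinstance(expected, str):  # expected strings are regexes
        return [] if re.match(expected, detected) else [f"{detected!r} !~ {expected!r}"]
    errors = [f"missing attribute {k!r}" for k in expected.keys() - detected.keys()]
    for key in expected.keys() & detected.keys():
        errors += compare(expected[key], detected[key])
    return errors

print(compare({"compilers": {"c": r".*/gcc"}}, {"compilers": {"c": "/usr/bin/gcc"}}))  # []
```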
@@ -17,7 +17,6 @@
 import tarfile
 import tempfile
 import time
-import traceback
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -30,6 +29,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.caches
 import spack.cmd
@@ -111,10 +111,6 @@ def __init__(self, errors):
     super().__init__(self.message)


-class ListMirrorSpecsError(spack.error.SpackError):
-    """Raised when unable to retrieve list of specs from the mirror"""
-
-
 class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -541,83 +537,6 @@ def binary_index_location():
 BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore


-class NoOverwriteException(spack.error.SpackError):
-    """Raised when a file would be overwritten"""
-
-    def __init__(self, file_path):
-        super().__init__(f"Refusing to overwrite the following file: {file_path}")
-
-
-class NoGpgException(spack.error.SpackError):
-    """
-    Raised when gpg2 is not in PATH
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class NoKeyException(spack.error.SpackError):
-    """
-    Raised when gpg has no default key added.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class PickKeyException(spack.error.SpackError):
-    """
-    Raised when multiple keys can be used to sign.
-    """
-
-    def __init__(self, keys):
-        err_msg = "Multiple keys available for signing\n%s\n" % keys
-        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super().__init__(err_msg)
-
-
-class NoVerifyException(spack.error.SpackError):
-    """
-    Raised if file fails signature verification.
-    """
-
-    pass
-
-
-class NoChecksumException(spack.error.SpackError):
-    """
-    Raised if file fails checksum verification.
-    """
-
-    def __init__(self, path, size, contents, algorithm, expected, computed):
-        super().__init__(
-            f"{algorithm} checksum failed for {path}",
-            f"Expected {expected} but got {computed}. "
-            f"File size = {size} bytes. Contents = {contents!r}",
-        )
-
-
-class NewLayoutException(spack.error.SpackError):
-    """
-    Raised if directory layout is different from buildcache.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class InvalidMetadataFile(spack.error.SpackError):
-    pass
-
-
-class UnsignedPackageException(spack.error.SpackError):
-    """
-    Raised if installation of unsigned package is attempted without
-    the use of ``--no-check-signature``.
-    """
-
-
 def compute_hash(data):
     if isinstance(data, str):
         data = data.encode("utf-8")
@@ -740,7 +659,7 @@ def get_buildfile_manifest(spec):
     # 2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-        link = os.readlink(abs_path)
+        link = readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)
@@ -992,15 +911,10 @@ def url_read_method(url):
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
-    except KeyError as inst:
-        msg = "No packages at {0}: {1}".format(cache_prefix, inst)
-        tty.warn(msg)
     except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
+        # If we got some kind of S3 (access denied or other connection error), the first non
+        # boto-specific class in the exception is Exception. Just print a warning and return
+        tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")

     return file_list, read_fn
@@ -1047,11 +961,10 @@ def generate_package_index(cache_prefix, concurrency=32):
     """
     try:
         file_list, read_fn = _spec_files_from_cache(cache_prefix)
-    except ListMirrorSpecsError as err:
-        tty.error("Unable to generate package index, {0}".format(err))
-        return
+    except ListMirrorSpecsError as e:
+        raise GenerateIndexError(f"Unable to generate package index: {e}") from e

-    tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
+    tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")

     tmpdir = tempfile.mkdtemp()
@@ -1061,27 +974,22 @@ def generate_package_index(cache_prefix, concurrency=32):

     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
-    except Exception as err:
-        msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
-        tty.debug("\n" + traceback.format_exc())
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing package index to {cache_prefix}: {e}"
+        ) from e
     finally:
-        shutil.rmtree(tmpdir)
+        shutil.rmtree(tmpdir, ignore_errors=True)


 def generate_key_index(key_prefix, tmpdir=None):
     """Create the key index page.

-    Creates (or replaces) the "index.json" page at the location given in
-    key_prefix. This page contains an entry for each key (.pub) under
-    key_prefix.
+    Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
+    contains an entry for each key (.pub) under key_prefix.
     """

-    tty.debug(
-        " ".join(
-            ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
-        )
-    )
+    tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")

     try:
         fingerprints = (
@@ -1089,17 +997,8 @@ def generate_key_index(key_prefix, tmpdir=None):
             for entry in web_util.list_url(key_prefix, recursive=False)
             if entry.endswith(".pub")
         )
-    except KeyError as inst:
-        msg = "No keys at {0}: {1}".format(key_prefix, inst)
-        tty.warn(msg)
-        return
-    except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
-        return
+    except Exception as e:
+        raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e

     remove_tmpdir = False

@@ -1124,12 +1023,13 @@ def generate_key_index(key_prefix, tmpdir=None):
             keep_original=False,
             extra_args={"ContentType": "application/json"},
         )
-    except Exception as err:
-        msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing key index to {key_prefix}: {e}"
+        ) from e
     finally:
         if remove_tmpdir:
-            shutil.rmtree(tmpdir)
+            shutil.rmtree(tmpdir, ignore_errors=True)


 def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
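The common thread in these hunks: warn-and-return becomes raise-with-chaining, so callers decide how to report failures and the original traceback survives. The pattern in isolation (the demo function is hypothetical; `GenerateIndexError` is defined later in this diff):

```python
def demo_generate_index(cache_prefix: str):
    try:
        raise OSError("access denied")  # stand-in for an S3/listing failure
    except Exception as e:
        # "from e" chains the original exception, preserving its traceback
        raise GenerateIndexError(
            f"Encountered problem pushing package index to {cache_prefix}: {e}"
        ) from e
```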
@@ -1200,7 +1100,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
     used at the mirror (following <tarball_directory_name>).

     This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
-    spec.json file already exist in the buildcache.
+    spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
+    when the tarball or spec.json file cannot be pushed to the buildcache.
     """
     if not spec.concrete:
         raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1179,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         key = select_signing_key(options.key)
         sign_specfile(key, options.force, specfile_path)

-    # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
-    web_util.push_to_url(
-        signed_specfile_path if not options.unsigned else specfile_path,
-        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
-        keep_original=False,
-    )
+    try:
+        # push tarball and signed spec json to remote mirror
+        web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+        web_util.push_to_url(
+            signed_specfile_path if not options.unsigned else specfile_path,
+            remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
+            keep_original=False,
+        )
+    except Exception as e:
+        raise PushToBuildCacheError(
+            f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
+        ) from e

     # push the key to the build cache's _pgp directory so it can be
     # imported
@@ -1296,8 +1202,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     if options.regenerate_index:
         generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

-    return None
-

 class NotInstalledError(spack.error.SpackError):
     """Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1256,6 @@ def specs_to_be_packaged(
     return [s for s in itertools.chain(roots, deps) if not s.external]


-def push(spec: Spec, mirror_url: str, options: PushOptions):
-    """Create and push binary package for a single spec to the specified
-    mirror url.
-
-    Args:
-        spec: Spec to package and push
-        mirror_url: Desired destination url for binary package
-        options:
-
-    Returns:
-        True if package was pushed, False otherwise.
-
-    """
-    try:
-        push_or_raise(spec, mirror_url, options)
-    except NoOverwriteException as e:
-        warnings.warn(str(e))
-        return False
-
-    return True
-
-
 def try_verify(specfile_path):
     """Utility function to attempt to verify a local file. Assumes the
     file is a clearsigned signature file.
@@ -2120,6 +2002,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+        spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -2706,3 +2589,96 @@ def conditional_fetch(self) -> FetchIndexResult:
         raise FetchIndexError(f"Remote index {url_manifest} is invalid")

     return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
+
+
+class NoOverwriteException(spack.error.SpackError):
+    """Raised when a file would be overwritten"""
+
+    def __init__(self, file_path):
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")
+
+
+class NoGpgException(spack.error.SpackError):
+    """
+    Raised when gpg2 is not in PATH
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class NoKeyException(spack.error.SpackError):
+    """
+    Raised when gpg has no default key added.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class PickKeyException(spack.error.SpackError):
+    """
+    Raised when multiple keys can be used to sign.
+    """
+
+    def __init__(self, keys):
+        err_msg = "Multiple keys available for signing\n%s\n" % keys
+        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
+        super().__init__(err_msg)
+
+
+class NoVerifyException(spack.error.SpackError):
+    """
+    Raised if file fails signature verification.
+    """
+
+    pass
+
+
+class NoChecksumException(spack.error.SpackError):
+    """
+    Raised if file fails checksum verification.
+    """
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
+
+
+class NewLayoutException(spack.error.SpackError):
+    """
+    Raised if directory layout is different from buildcache.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+    pass
+
+
+class UnsignedPackageException(spack.error.SpackError):
+    """
+    Raised if installation of unsigned package is attempted without
+    the use of ``--no-check-signature``.
+    """
+
+
+class ListMirrorSpecsError(spack.error.SpackError):
+    """Raised when unable to retrieve list of specs from the mirror"""
+
+
+class GenerateIndexError(spack.error.SpackError):
+    """Raised when unable to generate key or package index for mirror"""
+
+
+class CannotListKeys(GenerateIndexError):
+    """Raised when unable to list keys when generating key index"""
+
+
+class PushToBuildCacheError(spack.error.SpackError):
+    """Raised when unable to push objects to binary mirror"""
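Because `CannotListKeys` subclasses `GenerateIndexError`, callers can catch narrowly or broadly (the mirror URL below is illustrative):

```python
try:
    generate_key_index("s3://example-mirror/build_cache/_pgp")
except CannotListKeys as e:
    print(f"no keys to index: {e}")
except GenerateIndexError as e:  # any other index-generation failure
    print(f"index generation failed: {e}")
```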
@@ -5,7 +5,13 @@
 """Function and classes needed to bootstrap Spack itself."""

 from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
-from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
+from .core import (
+    all_core_root_specs,
+    ensure_clingo_importable_or_raise,
+    ensure_core_dependencies,
+    ensure_gpg_in_path_or_raise,
+    ensure_patchelf_in_path_or_raise,
+)
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message

@@ -13,6 +19,8 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
+    "ensure_gpg_in_path_or_raise",
+    "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
     "all_core_root_specs",
     "ensure_environment_dependencies",
@@ -54,10 +54,14 @@ def _try_import_from_store(
     installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

     for candidate_spec in installed_specs:
-        pkg = candidate_spec["python"].package
+        # previously bootstrapped specs may not have a python-venv dependency.
+        if candidate_spec.dependencies("python-venv"):
+            python, *_ = candidate_spec.dependencies("python-venv")
+        else:
+            python, *_ = candidate_spec.dependencies("python")
         module_paths = [
-            os.path.join(candidate_spec.prefix, pkg.purelib),
-            os.path.join(candidate_spec.prefix, pkg.platlib),
+            os.path.join(candidate_spec.prefix, python.package.purelib),
+            os.path.join(candidate_spec.prefix, python.package.platlib),
         ]
         path_before = list(sys.path)
@@ -213,9 +217,6 @@ def _root_spec(spec_str: str) -> str:
     platform = str(spack.platforms.host())
     if platform == "darwin":
         spec_str += " %apple-clang"
-    elif platform == "windows":
-        # TODO (johnwparent): Remove version constraint when clingo patch is up
-        spec_str += " %msvc@:19.37"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
@@ -147,7 +147,7 @@ def _add_compilers_if_missing() -> None:
         mixed_toolchain=sys.platform == "darwin"
     )
     if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
+        spack.compilers.add_compilers_to_config(new_compilers)


 @contextlib.contextmanager
@@ -173,35 +173,14 @@ def _read_metadata(self, package_name: str) -> Any:
         return data

     def _install_by_hash(
-        self,
-        pkg_hash: str,
-        pkg_sha256: str,
-        index: List[spack.spec.Spec],
-        bincache_platform: spack.platforms.Platform,
+        self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
     ) -> None:
-        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
-        # Reconstruct the compiler that we need to use for bootstrapping
-        compiler_entry = {
-            "modules": [],
-            "operating_system": str(index_spec.os),
-            "paths": {
-                "cc": "/dev/null",
-                "cxx": "/dev/null",
-                "f77": "/dev/null",
-                "fc": "/dev/null",
-            },
-            "spec": str(index_spec.compiler),
-            "target": str(index_spec.target.family),
-        }
         with spack.platforms.use_platform(bincache_platform):
-            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
-                spec_str = "/" + pkg_hash
-                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
-                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
-                for match in matches:
-                    spack.binary_distribution.install_root_node(
-                        match, unsigned=True, force=True, sha256=pkg_sha256
-                    )
+            query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
+            for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
+                spack.binary_distribution.install_root_node(
+                    match, unsigned=True, force=True, sha256=pkg_sha256
+                )

     def _install_and_test(
         self,
@@ -232,7 +211,7 @@ def _install_and_test(
                 continue

             for _, pkg_hash, pkg_sha256 in item["binaries"]:
-                self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
+                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

             info: ConfigDictionary = {}
             if test_fn(query_spec=abstract_spec, query_info=info):
@@ -291,10 +270,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
-            # This is needed to help the old concretizer taking the `setuptools` dependency
-            # only when bootstrapping from sources on Python 3.12
-            if spec_for_current_python() == "python@3.12":
-                concrete_spec.constrain("+force_setuptools")

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
@@ -559,6 +534,41 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
    )


+def ensure_winsdk_external_or_raise() -> None:
+    """Ensure the Windows SDK + WGL are available on system
+
+    If both of these packages are found, the Spack user or bootstrap
+    configuration (depending on where Spack is running)
+    will be updated to include all versions and variants detected.
+    If either the WDK or WSDK are not found, this method will raise
+    a RuntimeError.
+
+    **NOTE:** This modifies the Spack config in the current scope,
+    either user or environment depending on the calling context.
+    This is different from all other current bootstrap dependency
+    checks.
+    """
+    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
+        return
+    externals = spack.detection.by_path(["win-sdk", "wgl"])
+    if not set(["win-sdk", "wgl"]) == externals.keys():
+        missing_packages_lst = []
+        if "wgl" not in externals:
+            missing_packages_lst.append("wgl")
+        if "win-sdk" not in externals:
+            missing_packages_lst.append("win-sdk")
+        missing_packages = " & ".join(missing_packages_lst)
+        raise RuntimeError(
+            f"Unable to find the {missing_packages}, please install these packages \
+            via the Visual Studio installer \
+            before proceeding with Spack or provide the path to a non standard install with \
+            'spack external find --path'"
+        )
+    # wgl/sdk are not required for bootstrapping Spack, but
+    # are required for building anything non trivial
+    # add to user config so they can be used by subsequent Spack ops
+    spack.detection.update_configuration(externals, buildable=False)
+
+
 def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
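The heart of `ensure_winsdk_external_or_raise` is a set comparison between requested and detected package names. A condensed sketch of that logic, with the same `spack.detection` calls used in the hunk (the detection result shown in the comment is hypothetical):

    import spack.detection

    requested = {"win-sdk", "wgl"}
    # name -> list of detected external specs; e.g. only wgl may be found
    externals = spack.detection.by_path(sorted(requested))
    missing = sorted(requested - externals.keys())
    if missing:
        raise RuntimeError(f"Unable to find the {' & '.join(missing)}")
    # Record what was found so later Spack operations can use it
    spack.detection.update_configuration(externals, buildable=False)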
@@ -3,13 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Bootstrap non-core Spack dependencies from an environment."""
-import glob
 import hashlib
 import os
 import pathlib
 import sys
-import warnings
-from typing import List
+from typing import Iterable, List

 import archspec.cpu

@@ -28,6 +26,16 @@
 class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

+    def __init__(self) -> None:
+        if not self.spack_yaml().exists():
+            self._write_spack_yaml_file()
+        super().__init__(self.environment_root())
+
+        # Remove python package roots created before python-venv was introduced
+        for s in self.concrete_roots():
+            if "python" in s.package.extendees and not s.dependencies("python-venv"):
+                self.deconcretize(s)
+
    @classmethod
    def spack_dev_requirements(cls) -> List[str]:
        """Spack development requirements"""
@@ -59,31 +67,19 @@ def view_root(cls) -> pathlib.Path:
        return cls.environment_root().joinpath("view")

    @classmethod
-    def pythonpaths(cls) -> List[str]:
-        """Paths to be added to sys.path or PYTHONPATH"""
-        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
-        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
-        result = glob.glob(glob_expr)
-        if not result:
-            msg = f"Cannot find any Python path in {cls.view_root()}"
-            warnings.warn(msg)
-        return result
-
-    @classmethod
-    def bin_dirs(cls) -> List[pathlib.Path]:
+    def bin_dir(cls) -> pathlib.Path:
        """Paths to be added to PATH"""
-        return [cls.view_root().joinpath("bin")]
+        return cls.view_root().joinpath("bin")
+
+    def python_dirs(self) -> Iterable[pathlib.Path]:
+        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
+        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}

    @classmethod
    def spack_yaml(cls) -> pathlib.Path:
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

-    def __init__(self) -> None:
-        if not self.spack_yaml().exists():
-            self._write_spack_yaml_file()
-        super().__init__(self.environment_root())
-
    def update_installations(self) -> None:
        """Update the installations of this environment."""
        log_enabled = tty.is_debug() or tty.is_verbose()
@@ -100,21 +96,13 @@ def update_installations(self) -> None:
            self.install_all()
            self.write(regenerate=True)

-    def update_syspath_and_environ(self) -> None:
-        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
-        the environment view.
-        """
-        # Do minimal modifications to sys.path and environment variables. In particular, pay
-        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
-        # the performance of the current interpreter
-        sys.path.extend(self.pythonpaths())
-        os.environ["PATH"] = os.pathsep.join(
-            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
-        )
-        os.environ["PYTHONPATH"] = os.pathsep.join(
-            os.environ.get("PYTHONPATH", "").split(os.pathsep)
-            + [str(x) for x in self.pythonpaths()]
-        )
+    def load(self) -> None:
+        """Update PATH and sys.path."""
+        # Make executables available (shouldn't need PYTHONPATH)
+        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
+
+        # Spack itself imports pytest
+        sys.path.extend(str(p) for p in self.python_dirs())

    def _write_spack_yaml_file(self) -> None:
        tty.msg(
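The new `load()` replaces the old PYTHONPATH plumbing with two much smaller changes to the running process: prepend the view's bin directory to PATH and extend `sys.path` with the interpreter's site directories, instead of globbing every `python*` directory in the view. A standalone illustration of the pattern, with a hypothetical view root and site-packages path:

    import os
    import pathlib
    import sys

    view = pathlib.Path("/opt/spack/bootstrap/view")  # hypothetical view root
    # Prepend, so the view's executables win over anything already on PATH
    os.environ["PATH"] = f"{view / 'bin'}{os.pathsep}{os.environ.get('PATH', '')}"
    # Only the site-packages directories, keeping sys.path as small as possible
    sys.path.extend(str(view / d) for d in ("lib/python3.11/site-packages",))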
@@ -164,4 +152,4 @@ def ensure_environment_dependencies() -> None:
    _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
-        env.update_syspath_and_environ()
+        env.load()
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import List, Tuple
+from typing import List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -57,8 +57,10 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
+import spack.compilers
 import spack.config
 import spack.deptypes as dt
+import spack.error
 import spack.main
 import spack.package_base
 import spack.paths
@@ -66,6 +68,7 @@
 import spack.repo
 import spack.schema.environment
 import spack.spec
+import spack.stage
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
@@ -78,7 +81,7 @@
 from spack.installer import InstallError
 from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
-    SYSTEM_DIRS,
+    SYSTEM_DIR_CASE_ENTRY,
    EnvironmentModifications,
    env_flag,
    filter_system_paths,
@@ -101,9 +104,13 @@
 # Spack's compiler wrappers.
 #
 SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
 SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
+SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
+SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
 SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
 SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
 SPACK_PREFIX = "SPACK_PREFIX"
@@ -416,7 +423,7 @@ def set_compiler_environment_variables(pkg, env):

    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

-    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
+    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

    compiler.setup_custom_environment(pkg, env)

@@ -544,9 +551,26 @@ def update_compiler_args_for_dep(dep):
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

-    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
-    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
-    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
+    # Spack managed directories include the stage, store and upstream stores. We extend this with
+    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
+    spack_managed_dirs: Set[str] = {
+        spack.stage.get_stage_root(),
+        spack.store.STORE.db.root,
+        *(db.root for db in spack.store.STORE.db.upstream_dbs),
+    }
+    spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
+
+    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
+    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
+    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
+    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
+    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
+
+    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
+    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
+    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
+    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
+    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
+    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))


 def set_package_py_globals(pkg, context: Context = Context.BUILD):
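Two details of this hunk are worth unpacking. SPACK_MANAGED_DIRS is serialized as a `|`-separated list of quoted shell case patterns, and `stable_partition` (from `llnl.util.lang`, returning the matching items first, as its use above implies) splits each directory list into a spack-managed half and a system half while preserving order. A sketch with hypothetical paths:

    from llnl.util.lang import stable_partition

    store = "/opt/spack/opt/spack"  # hypothetical store root
    managed = sorted({store, "/tmp/spack-stage"})
    case_entry = "|".join(f'"{p}/"*' for p in managed)
    # case_entry -> '"/opt/spack/opt/spack/"*|"/tmp/spack-stage/"*'

    link_dirs = [f"{store}/zlib/lib", "/usr/lib", f"{store}/mpi/lib"]
    in_store, system = stable_partition(
        link_dirs, lambda p: any(p.startswith(m) for m in managed)
    )
    # in_store -> [.../zlib/lib, .../mpi/lib], system -> ['/usr/lib'], order kept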
@@ -583,10 +607,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    link_dir = spack.paths.build_env_path
-    module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
-    module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
-    module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
-    module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
+    pkg_compiler = None
+    try:
+        pkg_compiler = pkg.compiler
+    except spack.compilers.NoCompilerForSpecError as e:
+        tty.debug(f"cannot set 'spack_cc': {str(e)}")
+
+    if pkg_compiler is not None:
+        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
+        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
+        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
+        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
+    else:
+        module.spack_cc = None
+        module.spack_cxx = None
+        module.spack_f77 = None
+        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
@@ -789,7 +825,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        for mod in ["cray-mpich", "cray-libsci"]:
            module("unload", mod)

-        if target.module_name:
+        if target and target.module_name:
            load_module(target.module_name)

    load_external_modules(pkg)
@@ -434,11 +434,6 @@ def _do_patch_libtool(self):
                    r"crtendS\.o",
                ]:
                    x.filter(regex=(rehead + o), repl="")
-            elif self.pkg.compiler.name == "dpcpp":
-                # Hack to filter out spurious predep_objects when building with Intel dpcpp
-                # (see https://github.com/spack/spack/issues/32863):
-                x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
-                x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
            elif self.pkg.compiler.name == "nag":
                for tag in ["fc", "f77"]:
                    marker = markers[tag]
@@ -541,7 +536,7 @@ def autoreconf(self, pkg, spec, prefix):
        if os.path.exists(self.configure_abs_path):
            return

-        # Else try to regenerate it, which reuquires a few build dependencies
+        # Else try to regenerate it, which requires a few build dependencies
        ensure_build_dependencies_or_raise(
            spec=spec,
            dependencies=["autoconf", "automake", "libtool"],
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
 import os
+import re
 from typing import Tuple

 import llnl.util.filesystem as fs
@@ -15,6 +16,12 @@
 from .cmake import CMakeBuilder, CMakePackage


+def spec_uses_toolchain(spec):
+    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
+    using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
+    return using_toolchain
+
+
 def cmake_cache_path(name, value, comment="", force=False):
    """Generate a string for a cmake cache variable"""
    force_str = " FORCE" if force else ""
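`spec_uses_toolchain` returns the (possibly empty) list of cxxflags entries matching "gcc-toolchain", so callers can use it both as a boolean and to fetch the flag itself, as `initconfig_hardware_entries` does further down. A self-contained check of the regex behavior (the flag value is hypothetical):

    import re

    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
    cxxflags = ["-O2", "--gcc-toolchain=/opt/rh/gcc-toolset-12/root/usr"]
    using = list(filter(gcc_toolchain_regex.match, cxxflags))
    # using -> ['--gcc-toolchain=/opt/rh/gcc-toolset-12/root/usr']; truthy only
    # when such a flag is present, and using[0] is the flag to forward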
@@ -213,7 +220,7 @@ def initconfig_mpi_entries(self):
        else:
            # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
            # vs the older versions which expect MPIEXEC
-            if self.pkg.spec["cmake"].satisfies("@3.10:"):
+            if spec["cmake"].satisfies("@3.10:"):
                entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
            else:
                entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -248,12 +255,17 @@ def initconfig_hardware_entries(self):
            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

-            archs = spec.variants["cuda_arch"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
+            # CUDA_FLAGS
+            cuda_flags = []
+
+            if not spec.satisfies("cuda_arch=none"):
+                cuda_archs = ";".join(spec.variants["cuda_arch"].value)
+                entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
+
+            if spec_uses_toolchain(spec):
+                cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
+
+            entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))

        if "+rocm" in spec:
            entries.append("#------------------{0}".format("-" * 30))
@@ -262,9 +274,6 @@ def initconfig_hardware_entries(self):

            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
            llvm_bin = spec["llvm-amdgpu"].prefix.bin
            llvm_prefix = spec["llvm-amdgpu"].prefix
            # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -277,11 +286,9 @@ def initconfig_hardware_entries(self):
            archs = self.spec.variants["amdgpu_target"].value
            if archs[0] != "none":
                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

        return entries

@@ -16,7 +16,7 @@


 class CargoPackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using cargo."""

    #: This attribute is used in UI queries that need to know the build
    #: system base class
@@ -39,16 +39,11 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
    if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
    ``find_python_hints`` for context."""
-    if not getattr(pkg, "find_python_hints", False):
+    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
+        "python", dt.BUILD | dt.LINK
+    ):
        return
-    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
-    if len(pythons) != 1:
-        return
-    try:
-        python_executable = pythons[0].package.command.path
-    except RuntimeError:
-        return
+    python_executable = pkg.spec["python"].command.path

    args.extend(
        [
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),

lib/spack/spack/build_systems/compiler.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import itertools
+import os
+import pathlib
+import re
+import sys
+from typing import Dict, List, Sequence, Tuple, Union
+
+import llnl.util.tty as tty
+from llnl.util.lang import classproperty
+
+import spack.compiler
+import spack.package_base
+
+# Local "type" for type hints
+Path = Union[str, pathlib.Path]
+
+
+class CompilerPackage(spack.package_base.PackageBase):
+    """A Package mixin for all common logic for packages that implement compilers"""
+
+    # TODO: how do these play nicely with other tags
+    tags: Sequence[str] = ["compiler"]
+
+    #: Optional suffix regexes for searching for this type of compiler.
+    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
+    #: version suffix for gcc.
+    compiler_suffixes: List[str] = [r"-.*"]
+
+    #: Optional prefix regexes for searching for this compiler
+    compiler_prefixes: List[str] = []
+
+    #: Compiler argument(s) that produces version information
+    #: If multiple arguments, the earlier arguments must produce errors when invalid
+    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
+
+    #: Regex used to extract version from compiler's output
+    compiler_version_regex: str = "(.*)"
+
+    #: Static definition of languages supported by this class
+    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
+
+    def __init__(self, spec: "spack.spec.Spec"):
+        super().__init__(spec)
+        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
+        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
+        assert set(self.supported_languages) <= set(self.compiler_languages), msg
+
+    @property
+    def supported_languages(self) -> Sequence[str]:
+        """Dynamic definition of languages supported by this package"""
+        return self.compiler_languages
+
+    @classproperty
+    def compiler_names(cls) -> Sequence[str]:
+        """Construct list of compiler names from per-language names"""
+        names = []
+        for language in cls.compiler_languages:
+            names.extend(getattr(cls, f"{language}_names"))
+        return names
+
+    @classproperty
+    def executables(cls) -> Sequence[str]:
+        """Construct executables for external detection from names, prefixes, and suffixes."""
+        regexp_fmt = r"^({0}){1}({2})$"
+        prefixes = [""] + cls.compiler_prefixes
+        suffixes = [""] + cls.compiler_suffixes
+        if sys.platform == "win32":
+            ext = r"\.(?:exe|bat)"
+            suffixes += [suf + ext for suf in suffixes]
+        return [
+            regexp_fmt.format(prefix, re.escape(name), suffix)
+            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
+        ]
+
+    @classmethod
+    def determine_version(cls, exe: Path):
+        version_argument = cls.compiler_version_argument
+        if isinstance(version_argument, str):
+            version_argument = (version_argument,)
+
+        for va in version_argument:
+            try:
+                output = spack.compiler.get_compiler_version_output(exe, va)
+                match = re.search(cls.compiler_version_regex, output)
+                if match:
+                    return ".".join(match.groups())
+            except spack.util.executable.ProcessError:
+                pass
+            except Exception as e:
+                tty.debug(
+                    f"[{__file__}] Cannot detect a valid version for the executable "
+                    f"{str(exe)}, for package '{cls.name}': {e}"
+                )
+
+    @classmethod
+    def compiler_bindir(cls, prefix: Path) -> Path:
+        """Overridable method for the location of the compiler bindir within the prefix"""
+        return os.path.join(prefix, "bin")
+
+    @classmethod
+    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
+        """Compute the paths to compiler executables associated with this package
+
+        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
+        to include with each spec object."""
+        # There are often at least two copies (not symlinks) of each compiler executable in the
+        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
+        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
+        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
+        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
+        # with canonical names if possible), we iterate over the executables in the reversed sorted
+        # order:
+        # First pass over languages identifies exes that are perfect matches for canonical names
+        # Second pass checks for names with prefix/suffix
+        # Second pass is sorted by language name length because longer named languages
+        # e.g. cxx can often contain the names of shorter named languages
+        # e.g. c (e.g. clang/clang++)
+        paths = {}
+        exes = sorted(exes, reverse=True)
+        languages = {
+            lang: getattr(cls, f"{lang}_names")
+            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
+        }
+        for exe in exes:
+            for lang, names in languages.items():
+                if os.path.basename(exe) in names:
+                    paths[lang] = exe
+                    break
+            else:
+                for lang, names in languages.items():
+                    if any(name in os.path.basename(exe) for name in names):
+                        paths[lang] = exe
+                        break
+
+        return paths
+
+    @classmethod
+    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
+        # path determination is separated so it can be reused in subclasses
+        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
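A standalone rendering of the regex construction in `executables`, using hypothetical per-language names mirroring a gcc-like subclass (the class attributes `c_names`, `cxx_names`, etc. are assumed to be supplied by subclasses, as `compiler_names` implies):

    import itertools
    import re

    names = ["gcc", "g++", "gfortran"]       # hypothetical compiler_names
    prefixes = [""] + [r"\w+-\w+-\w+-"]      # "" plus compiler_prefixes
    suffixes = [""] + [r"-.*"]               # "" plus compiler_suffixes
    regexes = [
        r"^({0}){1}({2})$".format(p, re.escape(n), s)
        for p, n, s in itertools.product(prefixes, names, suffixes)
    ]
    # e.g. '^()gcc()$', '^()gcc(-.*)$', '^(\\w+-\\w+-\\w+-)gcc()$', ...
    # so plain "gcc", macports-style "gcc-mp-12", and triplet-prefixed
    # "x86_64-pc-linux-gnu-gcc" all match; re.escape() keeps "g++" literal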
@@ -21,7 +21,7 @@


 class MakefilePackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using Makefiles."""

    #: This attribute is used in UI queries that need to know the build
    #: system base class
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
        opts += self.nmake_install_args()
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", prefix))
+        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
        with fs.working_dir(self.build_directory):
            inspect.getmodule(self.pkg).nmake(
                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree

 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, variant
+from spack.directives import conflicts, license, redistribute, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -26,10 +26,11 @@ class IntelOneApiPackage(Package):
    """Base class for Intel oneAPI packages."""

    homepage = "https://software.intel.com/oneapi"
+    license("https://intel.ly/393CijO")

    # oneAPI license does not allow mirroring outside of the
    # organization (e.g. University/Company).
-    redistribute_source = False
+    redistribute(source=False, binary=False)

    for c in [
        "target=ppc64:",
@@ -4,12 +4,15 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
 import os
+from typing import Iterable

-from llnl.util.filesystem import filter_file
+from llnl.util.filesystem import filter_file, find
+from llnl.util.lang import memoized

 import spack.builder
 import spack.package_base
 from spack.directives import build_system, extends
+from spack.install_test import SkipTest, test_part
 from spack.util.executable import Executable

 from ._checks import BaseBuilder, execute_build_time_tests
@@ -28,6 +31,58 @@ class PerlPackage(spack.package_base.PackageBase):

    extends("perl", when="build_system=perl")

+    @property
+    @memoized
+    def _platform_dir(self):
+        """Name of platform-specific module subdirectory."""
+        perl = self.spec["perl"].command
+        options = "-E", "use Config; say $Config{archname}"
+        out = perl(*options, output=str.split, error=str.split)
+        return out.strip()
+
+    @property
+    def use_modules(self) -> Iterable[str]:
+        """Names of the package's perl modules."""
+        module_files = find(self.prefix.lib, ["*.pm"], recursive=True)
+
+        # Drop the platform directory, if present
+        if self._platform_dir:
+            platform_dir = self._platform_dir + os.sep
+            module_files = [m.replace(platform_dir, "") for m in module_files]
+
+        # Drop the extension and library path
+        prefix = self.prefix.lib + os.sep
+        modules = [os.path.splitext(m)[0].replace(prefix, "") for m in module_files]
+
+        # Drop the perl subdirectory as well
+        return ["::".join(m.split(os.sep)[1:]) for m in modules]
+
+    @property
+    def skip_modules(self) -> Iterable[str]:
+        """Names of modules that should be skipped when running tests.
+
+        These are a subset of use_modules.
+
+        Returns:
+            List of strings of module names.
+        """
+        return []
+
+    def test_use(self):
+        """Test 'use module'"""
+        if not self.use_modules:
+            raise SkipTest("Test requires use_modules package property.")
+
+        perl = self.spec["perl"].command
+        for module in self.use_modules:
+            if module in self.skip_modules:
+                continue
+
+            with test_part(self, f"test_use-{module}", purpose=f"checking use of {module}"):
+                options = ["-we", f'use strict; use {module}; print("OK\n")']
+                out = perl(*options, output=str.split, error=str.split)
+                assert "OK" in out
+
+
 @spack.builder.builder("perl")
 class PerlBuilder(BaseBuilder):
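A worked example of the path-to-module-name transformation inside `use_modules`, with a hypothetical POSIX install layout (on POSIX, `os.sep` is "/"):

    import os

    # Hypothetical file found under <prefix>/lib: perl5/Foo/Bar.pm, after the
    # library prefix has been stripped:
    relative = "perl5/Foo/Bar.pm"
    module = os.path.splitext(relative)[0]      # -> 'perl5/Foo/Bar'
    name = "::".join(module.split(os.sep)[1:])  # drop 'perl5' -> 'Foo::Bar'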
@@ -52,7 +107,7 @@ class PerlBuilder(BaseBuilder):
    phases = ("configure", "build", "install")

    #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_args", "check")
+    legacy_methods = ("configure_args", "check", "test_use")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()
@@ -27,7 +27,7 @@
 import spack.package_base
 import spack.spec
 import spack.store
-from spack.directives import build_system, depends_on, extends, maintainers
+from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
 from spack.spec import Spec
@@ -56,8 +56,6 @@ def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:


 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart")
-
    @property
    def import_modules(self) -> Iterable[str]:
        """Names of modules that the Python package provides.
@@ -140,16 +138,21 @@ def view_file_conflicts(self, view, merge_map):
        return conflicts

    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
-        # Patch up shebangs to the python linked in the view only if python is built by Spack.
-        if not self.extendee_spec or self.extendee_spec.external:
+        # Patch up shebangs if the package extends Python and we put a Python interpreter in the
+        # view.
+        if not self.extendee_spec:
+            return super().add_files_to_view(view, merge_map, skip_if_exists)
+
+        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+
+        if python.external:
            return super().add_files_to_view(view, merge_map, skip_if_exists)

        # We only patch shebangs in the bin directory.
        copied_files: Dict[Tuple[int, int], str] = {}  # File identifier -> source
        delayed_links: List[Tuple[str, str]] = []  # List of symlinks from merge map

        bin_dir = self.spec.prefix.bin
-        python_prefix = self.extendee_spec.prefix
        for src, dst in merge_map.items():
            if skip_if_exists and os.path.lexists(dst):
                continue
@@ -170,7 +173,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
                    copied_files[(s.st_dev, s.st_ino)] = dst
                    shutil.copy2(src, dst)
                    fs.filter_file(
-                        python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
+                        python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
                    )
                else:
                    view.link(src, dst)
@@ -201,14 +204,13 @@ def remove_files_from_view(self, view, merge_map):
        ignore_namespace = True

        bin_dir = self.spec.prefix.bin
-        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)

        to_remove = []
        for src, dst in merge_map.items():
            if ignore_namespace and namespace_init(dst):
                continue

-            if global_view or not fs.path_contains_subdirectory(src, bin_dir):
+            if not fs.path_contains_subdirectory(src, bin_dir):
                to_remove.append(dst)
            else:
                os.remove(dst)
@@ -364,6 +366,12 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
            return f"https://pypi.org/simple/{name}/"
        return None

+    @property
+    def python_spec(self):
+        """Get python-venv if it exists or python otherwise."""
+        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+        return python
+
    @property
    def headers(self) -> HeaderList:
        """Discover header files in platlib."""
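`python_spec` leans on the fact that `dependencies()` returns a list and an empty list is falsy, so python-venv wins whenever it is in the DAG and plain python is the fallback. The same idiom, reduced to plain data (the dependency values are hypothetical):

    def pick_python(venv_deps, python_deps):
        # mirrors: spec.dependencies("python-venv") or spec.dependencies("python")
        python, *_ = venv_deps or python_deps
        return python

    assert pick_python([], ["python@3.11"]) == "python@3.11"
    assert pick_python(["python-venv"], ["python@3.11"]) == "python-venv"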
@@ -373,8 +381,9 @@ def headers(self) -> HeaderList:

        # Headers should only be in include or platlib, but no harm in checking purelib too
        include = self.prefix.join(self.spec["python"].package.include).join(name)
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

        headers_list = map(fs.find_all_headers, [include, platlib, purelib])
        headers = functools.reduce(operator.add, headers_list)
@@ -393,8 +402,9 @@ def libs(self) -> LibraryList:
        name = self.spec.name[3:]

        # Libraries should only be in platlib, but no harm in checking purelib too
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

        find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
        libs_list = map(find_all_libraries, [platlib, purelib])
@@ -506,6 +516,8 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:

    def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        """Install everything from build directory."""
+        pip = spec["python"].command
+        pip.add_default_arg("-m", "pip")

        args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

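Hoisting the pip setup to the top of `install` relies on `add_default_arg` now taking several arguments at once, as the added line shows; every later invocation of the Executable then runs through `python -m pip`. A hedged usage sketch (the interpreter path is hypothetical):

    from spack.util.executable import Executable

    python = Executable("/usr/bin/python3")  # hypothetical interpreter path
    python.add_default_arg("-m", "pip")
    # python("install", "--prefix=/tmp/prefix", ".") would then execute:
    #   /usr/bin/python3 -m pip install --prefix=/tmp/prefix .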
@@ -521,14 +533,6 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        else:
            args.append(".")

-        pip = spec["python"].command
-        # Hide user packages, since we don't have build isolation. This is
-        # necessary because pip / setuptools may run hooks from arbitrary
-        # packages during the build. There is no equivalent variable to hide
-        # system packages, so this is not reliable for external Python.
-        pip.add_default_env("PYTHONNOUSERSITE", "1")
-        pip.add_default_arg("-m")
-        pip.add_default_arg("pip")
        with fs.working_dir(self.build_directory):
            pip(*args)
@@ -75,9 +75,12 @@
 # does not like its directory structure.
 #

+import os
+
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
+from spack.util.environment import EnvironmentModifications


 class ROCmPackage(PackageBase):
@@ -154,6 +157,25 @@ def hip_flags(amdgpu_target):
        archs = ",".join(amdgpu_target)
        return "--amdgpu-target={0}".format(archs)

+    def asan_on(self, env: EnvironmentModifications):
+        llvm_path = self.spec["llvm-amdgpu"].prefix
+        env.set("CC", llvm_path + "/bin/clang")
+        env.set("CXX", llvm_path + "/bin/clang++")
+        env.set("ASAN_OPTIONS", "detect_leaks=0")
+
+        for root, _, files in os.walk(llvm_path):
+            if "libclang_rt.asan-x86_64.so" in files:
+                asan_lib_path = root
+        env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
+        if "rhel" in self.spec.os or "sles" in self.spec.os:
+            SET_DWARF_VERSION_4 = "-gdwarf-5"
+        else:
+            SET_DWARF_VERSION_4 = ""
+
+        env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
+        env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
+        env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
+
    # HIP version vs Architecture

    # TODO: add a bunch of lines like:
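Note that `asan_on` only records modifications; nothing touches the process environment until the collected changes are applied. A hedged usage sketch (the package object is hypothetical, and the `set` call below merely stands in for what `asan_on` would record):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    # pkg.asan_on(env)  # pkg: hypothetical package inheriting ROCmPackage
    env.set("ASAN_OPTIONS", "detect_leaks=0")  # placeholder for recorded changes
    env.apply_modifications()  # commits the recorded changes to os.environ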
@@ -9,6 +9,8 @@
 import inspect
 from typing import List, Optional, Tuple

+from llnl.util import lang
+
 import spack.build_environment

 #: Builder classes, as registered by the "builder" decorator
@@ -231,24 +233,27 @@ def __new__(mcs, name, bases, attr_dict):
        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
            staged_callbacks = temporary_stage.callbacks

-            # We don't have callbacks in this class, move on
-            if not staged_callbacks:
+            # Here we have an adapter from an old-style package. This means there is no
+            # hierarchy of builders, and every callback that had to be combined between
+            # *Package and *Builder has been combined already by _PackageAdapterMeta
+            if name == "Adapter":
                continue

-            # If we are here we have callbacks. To get a complete list, get first what
-            # was attached to parent classes, then prepend what we have registered here.
+            # If we are here we have callbacks. To get a complete list, we accumulate all the
+            # callbacks from base classes, we deduplicate them, then prepend what we have
+            # registered here.
            #
            # The order should be:
            # 1. Callbacks are registered in order within the same class
            # 2. Callbacks defined in derived classes precede those defined in base
            #    classes
+            callbacks_from_base = []
            for base in bases:
-                callbacks_from_base = getattr(base, temporary_stage.attribute_name, None)
-                if callbacks_from_base:
-                    break
-            else:
-                callbacks_from_base = []
+                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
+                if not current_callbacks:
+                    continue
+                callbacks_from_base.extend(current_callbacks)
+            callbacks_from_base = list(lang.dedupe(callbacks_from_base))

            # Set the callbacks in this class and flush the temporary stage
            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
            del temporary_stage.callbacks[:]
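The rewritten loop accumulates callbacks from every base class instead of stopping at the first base that has any; `llnl.util.lang.dedupe` then keeps only the first occurrence of each entry while preserving order, which is what makes the merged list stable:

    from llnl.util import lang

    # Hypothetical callbacks gathered from two base classes
    merged = ["cb_a", "cb_b", "cb_a", "cb_c"]
    assert list(lang.dedupe(merged)) == ["cb_a", "cb_b", "cb_c"]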
(File diff suppressed because it is too large)
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
        variants (bool): Show variants with specs
        indent (int): indent each line this much
        groups (bool): display specs grouped by arch/compiler (default True)
-        decorators (dict): dictionary mappng specs to decorators
-        header_callback (typing.Callable): called at start of arch/compiler groups
+        decorator (typing.Callable): function to call to decorate specs
        all_headers (bool): show headers even when arch/compiler aren't defined
        output (typing.IO): A file object to write to. Default is ``sys.stdout``
@@ -384,15 +383,13 @@ def get_arg(name, default=None):
    vfmt = "{variants}" if variants else ""
    format_string = nfmt + "{@version}" + ffmt + vfmt

-    transform = {"package": decorator, "fullpackage": decorator}
-
    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""
        if hashes:
            string += gray_hash(s, hlen) + " "
        string += depth * "    "
-        string += s.cformat(format_string, transform=transform)
+        string += decorator(s, s.cformat(format_string))
        return string

    def format_list(specs):
@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
    return [x for x in specs if x.dag_hash() in hashes]


-def print_how_many_pkgs(specs, pkg_type=""):
+def print_how_many_pkgs(specs, pkg_type="", suffix=""):
    """Given a list of specs, this will print a message about how many
    specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
    category, e.g. if pkg_type is "installed" then the message
    would be "3 installed packages"
    """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


 def spack_is_git_repo():
@@ -84,7 +84,7 @@ def externals(parser, args):
        return

    pkgs = args.name or spack.repo.PATH.all_package_names()
-    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
    _process_reports(reports)
@@ -133,6 +133,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
        help="when pushing to an OCI registry, tag an image containing all root specs and their "
        "runtime dependencies",
    )
+    push.add_argument(
+        "--private",
+        action="store_true",
+        help="for a private mirror, include non-redistributable packages",
+    )
    arguments.add_common_arguments(push, ["specs", "jobs"])
    push.set_defaults(func=push_fn)
@@ -275,23 +280,37 @@ def setup_parser(subparser: argparse.ArgumentParser):

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
-    sync.add_argument(
-        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
+    sync_manifest_source = sync.add_argument_group(
+        "Manifest Source",
+        "Specify a list of build cache objects to sync using manifest file(s)."
+        'This option takes the place of the "source mirror" for synchronization'
+        'and optionally takes a "destination mirror" ',
     )
-    sync.add_argument(
+    sync_manifest_source.add_argument(
+        "--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
+    )
+    sync_source_mirror = sync.add_argument_group(
+        "Named Source",
+        "Specify a single registered source mirror to synchronize from. This option requires"
+        "the specification of a destination mirror.",
+    )
+    sync_source_mirror.add_argument(
         "src_mirror",
         metavar="source mirror",
-        type=arguments.mirror_name_or_url,
         nargs="?",
+        type=arguments.mirror_name_or_url,
         help="source mirror name, path, or URL",
     )

     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
-        type=arguments.mirror_name_or_url,
         nargs="?",
+        type=arguments.mirror_name_or_url,
         help="destination mirror name, path, or URL",
     )

     sync.set_defaults(func=sync_fn)

     # Update buildcache index without copying any additional packages
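Note: after this change `spack buildcache sync` has two documented modes: a manifest-driven mode (`--manifest-glob`, where a single optional positional overrides the destination) and a named-source mode (source plus destination mirror). Both positionals stay `nargs="?"` so either mode can omit them; a standalone sketch of that parsing behavior:

    import argparse

    sync = argparse.ArgumentParser(prog="spack buildcache sync")
    sync.add_argument("--manifest-glob")
    sync.add_argument("src_mirror", nargs="?")
    sync.add_argument("dest_mirror", nargs="?")

    # Manifest mode: no positionals are required.
    print(sync.parse_args(["--manifest-glob", "*.json"]))
    # Named-source mode: both mirrors are given.
    print(sync.parse_args(["my-source", "my-dest"]))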
@@ -353,6 +372,25 @@ def _make_pool() -> MaybePool:
     return NoPool()


+def _skip_no_redistribute_for_public(specs):
+    remaining_specs = list()
+    removed_specs = list()
+    for spec in specs:
+        if spec.package.redistribute_binary:
+            remaining_specs.append(spec)
+        else:
+            removed_specs.append(spec)
+    if removed_specs:
+        colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
+        tty.debug(
+            "The following specs will not be added to the binary cache"
+            " because they cannot be redistributed:\n"
+            f"{colified_output}\n"
+            "You can use `--private` to include them."
+        )
+    return remaining_specs
+
+
 def push_fn(args):
     """create a binary package and push it to a mirror"""
     if args.spec_file:
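Note: `_skip_no_redistribute_for_public` partitions specs on the package's `redistribute_binary` attribute. A self-contained sketch of the same logic with a toy spec type (`ToySpec` is hypothetical; the real predicate is `spec.package.redistribute_binary`):

    from typing import List, NamedTuple

    class ToySpec(NamedTuple):
        name: str
        redistributable: bool

    def skip_no_redistribute_for_public(specs: List[ToySpec]) -> List[ToySpec]:
        remaining = [s for s in specs if s.redistributable]
        removed = [s for s in specs if not s.redistributable]
        if removed:
            print("skipped (cannot be redistributed):", [s.name for s in removed])
            print("use `--private` to include them")
        return remaining

    specs = [ToySpec("zlib", True), ToySpec("cuda", False)]
    print([s.name for s in skip_no_redistribute_for_public(specs)])  # ['zlib']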
@@ -403,6 +441,8 @@ def push_fn(args):
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
+    if not args.private:
+        specs = _skip_no_redistribute_for_public(specs)

     # When pushing multiple specs, print the url once ahead of time, as well as how
     # many specs are being pushed.
@@ -1070,7 +1110,17 @@ def sync_fn(args):
     requires an active environment in order to know which specs to sync
     """
     if args.manifest_glob:
-        manifest_copy(glob.glob(args.manifest_glob))
+        # Passing the args.src_mirror here because it is not possible to
+        # have the destination be required when specifying a named source
+        # mirror and optional for the --manifest-glob argument. In the case
+        # of manifest glob sync, the source mirror positional argument is the
+        # destination mirror if it is specified. If there are two mirrors
+        # specified, the second is ignored and the first is the override
+        # destination.
+        if args.dest_mirror:
+            tty.warn(f"Ignoring unused argument: {args.dest_mirror.name}")
+
+        manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
         return 0

     if args.src_mirror is None or args.dest_mirror is None:
@@ -1121,7 +1171,7 @@ def sync_fn(args):
     shutil.rmtree(tmpdir)


-def manifest_copy(manifest_file_list):
+def manifest_copy(manifest_file_list, dest_mirror=None):
     """Read manifest files containing information about specific specs to copy
     from source to destination, remove duplicates since any binary package for
     a given hash should be the same as any other, and copy all files specified
@@ -1135,10 +1185,17 @@ def manifest_copy(manifest_file_list):
                 # Last duplicate hash wins
                 deduped_manifest[spec_hash] = copy_list

+    build_cache_dir = bindist.build_cache_relative_path()
     for spec_hash, copy_list in deduped_manifest.items():
         for copy_file in copy_list:
-            tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
-            copy_buildcache_file(copy_file["src"], copy_file["dest"])
+            dest = copy_file["dest"]
+            if dest_mirror:
+                src_relative_path = os.path.join(
+                    build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
+                )
+                dest = url_util.join(dest_mirror.push_url, src_relative_path)
+            tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
+            copy_buildcache_file(copy_file["src"], dest)


 def update_index(mirror: spack.mirror.Mirror, update_keys=False):
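Note: when a `dest_mirror` is given, each source path is re-rooted onto the destination's push URL by splitting at the relative build cache directory. A worked example, assuming that directory is "build_cache" (what `bindist.build_cache_relative_path()` returns) and using plain string joining in place of `url_util.join`:

    import os

    build_cache_dir = "build_cache"
    src = "file:///old-mirror/build_cache/linux-x86_64/pkg-1.0.spack"
    push_url = "s3://new-mirror"

    src_relative_path = os.path.join(
        build_cache_dir, src.rsplit(build_cache_dir, 1)[1].lstrip("/")
    )
    dest = "/".join([push_url, src_relative_path])  # stand-in for url_util.join
    print(dest)  # s3://new-mirror/build_cache/linux-x86_64/pkg-1.0.spack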
@@ -1165,14 +1222,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
             url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
         )

-        bindist.generate_key_index(keys_url)
+        try:
+            bindist.generate_key_index(keys_url)
+        except bindist.CannotListKeys as e:
+            # Do not error out if listing keys went wrong. This usually means that the _gpg path
+            # does not exist. TODO: distinguish between this and other errors.
+            tty.warn(f"did not update the key index: {e}")


 def update_index_fn(args):
     """update a buildcache index"""
-    update_index(args.mirror, update_keys=args.keys)
+    return update_index(args.mirror, update_keys=args.keys)


 def buildcache(parser, args):
-    if args.func:
-        args.func(args)
+    return args.func(args)
@@ -183,7 +183,7 @@ def checksum(parser, args):
         print()

     if args.add_to_package:
-        add_versions_to_package(pkg, version_lines)
+        add_versions_to_package(pkg, version_lines, args.batch)


 def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -229,7 +229,7 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
         tty.die("Invalid checksums found.")


-def add_versions_to_package(pkg: PackageBase, version_lines: str):
+def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
     """
     Add checksummed versions to a package's instructions and open a user's
     editor so they may double check the work of the function.
@@ -282,5 +282,5 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
     tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
     tty.msg(f"Open {filename} to review the additions.")

-    if sys.stdout.isatty():
+    if sys.stdout.isatty() and not is_batch:
         editor(filename)
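Note: with `is_batch` threaded through from `args.batch`, the editor now only opens for an interactive session that did not request batch mode. Sketch of the gating condition:

    import sys

    def maybe_open_editor(filename: str, is_batch: bool) -> None:
        if sys.stdout.isatty() and not is_batch:
            print(f"would open editor on {filename}")
        else:
            print("batch or non-interactive: skipping editor")

    maybe_open_editor("package.py", is_batch=True)   # batch or non-interactive: skipping editor
    maybe_open_editor("package.py", is_batch=False)  # depends on whether stdout is a TTY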
@@ -14,6 +14,7 @@

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci
+import spack.cmd
 import spack.cmd.buildcache as buildcache
 import spack.config as cfg
 import spack.environment as ev
@@ -30,14 +31,20 @@
 level = "long"

 SPACK_COMMAND = "spack"
-MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
+FAILED_CREATE_BUILDCACHE_CODE = 100


 def deindent(desc):
     return desc.replace("    ", "")


+def unicode_escape(path: str) -> str:
+    """Returns transformed path with any unicode
+    characters replaced with their corresponding escapes"""
+    return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
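Note: `unicode_escape` exists so that paths containing non-ASCII characters survive being embedded in generated shell commands. Example (the Windows-style path is illustrative):

    def unicode_escape(path: str) -> str:
        return path.encode("unicode-escape").decode("utf-8")

    print(unicode_escape("C:\\Users\\müller\\env"))
    # C:\\Users\\m\xfcller\\env  -- backslashes and 'ü' become escape sequences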
@@ -549,75 +556,35 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-    install_args = []
+    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    slash_hash = "/{}".format(job_spec.dag_hash())
+    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

-    # Arguments when installing dependencies from cache
-    deps_install_args = install_args
-
     # Arguments when installing the root from sources
-    root_install_args = install_args + [
-        "--keep-stage",
-        "--only=package",
-        "--use-buildcache=package:never,dependencies:only",
-    ]
+    deps_install_args = install_args + ["--only=dependencies"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())
-    root_install_args.append(slash_hash)

-    # ["x", "y"] -> "'x' 'y'"
-    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
-
     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
-        [
-            SPACK_COMMAND,
-            "-e",
-            env.path,
-            "env",
-            "depfile",
-            "-o",
-            "Makefile",
-            "--use-buildcache=package:never,dependencies:only",
-            slash_hash,  # limit to spec we're building
-        ],
-        [
-            # --output-sync requires GNU make 4.x.
-            # Old make errors when you pass it a flag it doesn't recognize,
-            # but it doesn't error or warn when you set unrecognized flags in
-            # this variable.
-            "export",
-            "GNUMAKEFLAGS=--output-sync=recurse",
-        ],
-        [
-            MAKE_COMMAND,
-            "SPACK={}".format(args_to_string(spack_cmd)),
-            "SPACK_COLOR=always",
-            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-            "-j$(nproc)",
-            "install-deps/{}".format(
-                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-                    "{name}-{version}-{hash}"
-                )
-            ),
-        ],
-        spack_cmd + ["install"] + root_install_args,
+        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
+        spack_cmd + deps_install_args + [slash_hash],
+        spack_cmd + root_install_args + [slash_hash],
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)

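Note: the depfile/GNU make pipeline is gone; the job now runs three commands in sequence. An illustration of what the resulting command list looks like for a hypothetical DAG hash (the path and hash below are made up):

    SPACK = "spack"
    spack_cmd = [SPACK, "--color=always", "--backtrace", "--verbose", "install"]
    install_args = ["--use-buildcache=package:never,dependencies:only"]
    slash_hash = "/abcdef1"  # hypothetical

    commands = [
        [SPACK, "-e", "/path/to/env", "bootstrap", "now"],
        spack_cmd + install_args + ["--only=dependencies", slash_hash],
        spack_cmd + install_args + ["--keep-stage", "--only=package", slash_hash],
    ]
    for command in commands:
        print(" ".join(command))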
@@ -705,11 +672,9 @@ def ci_rebuild(args):
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
             cdash_handler.copy_test_results(reports_dir, job_test_dir)

-    # If the install succeeded, create a buildcache entry for this job spec
-    # and push it to one or more mirrors. If the install did not succeed,
-    # print out some instructions on how to reproduce this build failure
-    # outside of the pipeline environment.
     if install_exit_code == 0:
+        # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
+        # will result in a non-zero exit code. Pushing is best-effort.
         mirror_urls = [buildcache_mirror_url]

         # TODO: Remove this block in Spack 0.23
@@ -721,13 +686,12 @@ def ci_rebuild(args):
             destination_mirror_urls=mirror_urls,
             sign_binaries=spack_ci.can_sign_binaries(),
         ):
-            msg = tty.msg if result.success else tty.warn
-            msg(
-                "{} {} to {}".format(
-                    "Pushed" if result.success else "Failed to push",
-                    job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
-                    result.url,
-                )
+            if not result.success:
+                install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
+            (tty.msg if result.success else tty.error)(
+                f'{"Pushed" if result.success else "Failed to push"} '
+                f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
+                f"to {result.url}"
             )

         # If this is a develop pipeline, check if the spec that we just built is
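Note: a failed push no longer just warns; it flips the job's exit code to the new constant while still reporting per-mirror results. A sketch of how a caller could tell the failure modes apart (assuming the job exits with `install_exit_code`):

    INSTALL_FAIL_CODE = 1
    FAILED_CREATE_BUILDCACHE_CODE = 100

    def describe(exit_code: int) -> str:
        if exit_code == 0:
            return "built and pushed"
        if exit_code == FAILED_CREATE_BUILDCACHE_CODE:
            return "built, but at least one buildcache push failed"
        return "build failed"

    print(describe(FAILED_CREATE_BUILDCACHE_CODE))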
@@ -748,22 +712,22 @@ def ci_rebuild(args):
                 tty.warn(msg.format(broken_spec_path, err))

     else:
+        # If the install did not succeed, print out some instructions on how to reproduce this
+        # build failure outside of the pipeline environment.
         tty.debug("spack install exited non-zero, will not create buildcache")

         api_root_url = os.environ.get("CI_API_V4_URL")
         ci_project_id = os.environ.get("CI_PROJECT_ID")
         ci_job_id = os.environ.get("CI_JOB_ID")

-        repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
-            api_root_url, ci_project_id, ci_job_id
-        )
+        repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"

         # Control characters cause this to be printed in blue so it stands out
-        reproduce_msg = """
+        print(
+            f"""

 \033[34mTo reproduce this build locally, run:

-    spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
+    spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]

 If this project does not have public pipelines, you will need to first:

@@ -771,12 +735,9 @@ def ci_rebuild(args):

 ... then follow the printed instructions.\033[0;0m

-""".format(
-            repro_job_url
+"""
         )

-        print(reproduce_msg)
-
     rebuild_timer.stop()
     try:
         with open("install_timers.json", "w") as timelog:
@@ -563,12 +563,21 @@ def add_concretizer_args(subparser):
         help="reuse installed packages/buildcaches when possible",
     )
     subgroup.add_argument(
+        "--fresh-roots",
         "--reuse-deps",
         action=ConfigSetAction,
         dest="concretizer:reuse",
         const="dependencies",
         default=None,
-        help="reuse installed dependencies only",
+        help="concretize with fresh roots and reused dependencies",
+    )
+    subgroup.add_argument(
+        "--deprecated",
+        action=ConfigSetAction,
+        dest="config:deprecated",
+        const=True,
+        default=None,
+        help="allow concretizer to select deprecated versions",
     )

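Note: `--fresh-roots` becomes an alias of `--reuse-deps` (both store `dependencies` under `concretizer:reuse`), and `--deprecated` stores `True` under `config:deprecated`. A toy model of the store-a-constant action (simplified; not Spack's `ConfigSetAction`, which writes to Spack's config scopes):

    import argparse

    class ConfigSetAction(argparse.Action):
        """Toy: store `const` on the namespace when the flag is present."""
        def __init__(self, option_strings, dest, const=None, **kwargs):
            super().__init__(option_strings, dest, nargs=0, const=const, **kwargs)

        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, self.const)

    p = argparse.ArgumentParser()
    p.add_argument("--fresh-roots", "--reuse-deps", action=ConfigSetAction,
                   dest="concretizer_reuse", const="dependencies", default=None)
    p.add_argument("--deprecated", action=ConfigSetAction,
                   dest="config_deprecated", const=True, default=None)

    print(p.parse_args(["--fresh-roots"]).concretizer_reuse)  # dependencies
    print(p.parse_args(["--deprecated"]).config_deprecated)   # True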
@@ -89,7 +89,7 @@ def compiler_find(args):
         paths, scope=None, mixed_toolchain=args.mixed_toolchain
     )
     if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
+        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
         n = len(new_compilers)
         s = "s" if n > 1 else ""

@@ -19,7 +19,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["jobs"])
+    arguments.add_common_arguments(subparser, ["jobs", "no_checksum", "spec"])
     subparser.add_argument(
         "-d",
         "--source-path",
@@ -34,7 +34,6 @@ def setup_parser(subparser):
         dest="ignore_deps",
         help="do not try to install dependencies of requested packages",
     )
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
     subparser.add_argument(
         "--keep-prefix",
         action="store_true",
@@ -63,7 +62,6 @@ def setup_parser(subparser):
         choices=["root", "all"],
         help="run tests on only root packages or all packages",
     )
-    arguments.add_common_arguments(subparser, ["spec"])

     stop_group = subparser.add_mutually_exclusive_group()
     stop_group.add_argument(
@@ -125,9 +123,6 @@ def dev_build(self, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     tests = False
     if args.test == "all":
         tests = True
@@ -9,13 +9,14 @@
 import shutil
 import sys
 import tempfile
-from typing import Optional
+from pathlib import Path
+from typing import List, Optional

 import llnl.string as string
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
-from llnl.util.tty.color import colorize
+from llnl.util.tty.color import cescape, colorize

 import spack.cmd
 import spack.cmd.common
@@ -44,6 +45,7 @@
     "deactivate",
     "create",
     ["remove", "rm"],
+    ["rename", "mv"],
     ["list", "ls"],
     ["status", "st"],
     "loads",
|||||||
#
|
#
|
||||||
def env_create_setup_parser(subparser):
|
def env_create_setup_parser(subparser):
|
||||||
"""create a new environment"""
|
"""create a new environment"""
|
||||||
subparser.add_argument(
|
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
|
||||||
"env_name",
|
|
||||||
metavar="env",
|
|
||||||
help=(
|
|
||||||
"name of managed environment or directory of the anonymous env "
|
|
||||||
"(when using --dir/-d) to activate"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
|
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
|
||||||
)
|
)
|
||||||
@@ -92,6 +87,9 @@ def env_create_setup_parser(subparser):
|
|||||||
default=None,
|
default=None,
|
||||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||||
)
|
)
|
||||||
|
subparser.add_argument(
|
||||||
|
"--include-concrete", action="append", help="name of old environment to copy specs from"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def env_create(args):
|
def env_create(args):
|
||||||
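Note: `--include-concrete` uses `action="append"`, so it can be repeated to pull concrete specs from several existing environments. Standalone illustration:

    import argparse

    p = argparse.ArgumentParser(prog="spack env create")
    p.add_argument("env_name", metavar="env")
    p.add_argument("--include-concrete", action="append",
                   help="name of old environment to copy specs from")

    args = p.parse_args(["combined", "--include-concrete", "cpu",
                         "--include-concrete", "gpu"])
    print(args.include_concrete)  # ['cpu', 'gpu']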
@@ -109,19 +107,32 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

+    include_concrete = None
+    if hasattr(args, "include_concrete"):
+        include_concrete = args.include_concrete
+
     env = _env_create(
         args.env_name,
         init_file=args.envfile,
-        dir=args.dir,
+        dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
         with_view=with_view,
         keep_relative=args.keep_relative,
+        include_concrete=include_concrete,
     )

     # Generate views, only really useful for environments created from spack.lock files.
     env.regenerate_views()


-def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
+def _env_create(
+    name_or_path: str,
+    *,
+    init_file: Optional[str] = None,
+    dir: bool = False,
+    with_view: Optional[str] = None,
+    keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
+):
     """Create a new environment, with an optional yaml description.

     Arguments:
@@ -133,22 +144,31 @@ def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep
         keep_relative (bool): if True, develop paths are copied verbatim into
             the new environment file, otherwise they may be made absolute if the
             new environment is in a different location
+        include_concrete (list): list of the included concrete environments
     """
     if not dir:
         env = ev.create(
-            name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
         )
-        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
-        tty.msg("You can activate this environment with:")
-        tty.msg("    spack env activate %s" % (name_or_path))
-        return env
-
-    env = ev.create_in_dir(
-        name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
-    )
-    tty.msg("Created environment in %s" % env.path)
-    tty.msg("You can activate this environment with:")
-    tty.msg("    spack env activate %s" % env.path)
+        tty.msg(
+            colorize(
+                f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
+            )
+        )
+    else:
+        env = ev.create_in_dir(
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
+        )
+        tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
+    tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
     return env

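Note: the new `dir=` expression means anything that looks like a path is treated as a directory environment even without `-d`. Sketch of the predicate (`os.path.sep` is "/" on POSIX):

    import os

    def looks_like_dir_env(env_name: str, dir_flag: bool = False) -> bool:
        return dir_flag or os.path.sep in env_name or env_name in (".", "..")

    print(looks_like_dir_env("myenv"))      # False -> managed (named) environment
    print(looks_like_dir_env("./scratch"))  # True  -> environment in a directory
    print(looks_like_dir_env("."))          # True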
@@ -434,6 +454,12 @@ def env_remove_setup_parser(subparser):
     """remove an existing environment"""
     subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
     arguments.add_common_arguments(subparser, ["yes_to_all"])
+    subparser.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        help="remove the environment even if it is included in another environment",
+    )


 def env_remove(args):
@@ -443,13 +469,35 @@ def env_remove(args):
     and manifests embedded in repositories should be removed manually.
     """
     read_envs = []
+    valid_envs = []
     bad_envs = []
-    for env_name in args.rm_env:
+    invalid_envs = []
+
+    for env_name in ev.all_environment_names():
         try:
             env = ev.read(env_name)
-            read_envs.append(env)
+            valid_envs.append(env_name)
+
+            if env_name in args.rm_env:
+                read_envs.append(env)
         except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-            bad_envs.append(env_name)
+            invalid_envs.append(env_name)
+
+            if env_name in args.rm_env:
+                bad_envs.append(env_name)
+
+        # Check if env is linked to another before trying to remove
+        for name in valid_envs:
+            # don't check if environment is included to itself
+            if name == env_name:
+                continue
+            environ = ev.Environment(ev.root(name))
+            if ev.root(env_name) in environ.included_concrete_envs:
+                msg = f'Environment "{env_name}" is being used by environment "{name}"'
+                if args.force:
+                    tty.warn(msg)
+                else:
+                    tty.die(msg)

     if not args.yes_to_all:
         environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -472,11 +520,82 @@ def env_remove(args):
         tty.msg(f"Successfully removed environment '{bad_env_name}'")


+#
+# env rename
+#
+def env_rename_setup_parser(subparser):
+    """rename an existing environment"""
+    subparser.add_argument(
+        "mv_from", metavar="from", help="name (or path) of existing environment"
+    )
+    subparser.add_argument(
+        "mv_to", metavar="to", help="new name (or path) for existing environment"
+    )
+    subparser.add_argument(
+        "-d",
+        "--dir",
+        action="store_true",
+        help="the specified arguments correspond to directory paths",
+    )
+    subparser.add_argument(
+        "-f", "--force", action="store_true", help="allow overwriting of an existing environment"
+    )
+
+
+def env_rename(args):
+    """Rename an environment.
+
+    This renames a managed environment or moves an anonymous environment.
+    """
+
+    # Directory option has been specified
+    if args.dir:
+        if not ev.is_env_dir(args.mv_from):
+            tty.die("The specified path does not correspond to a valid spack environment")
+        from_path = Path(args.mv_from)
+        if not args.force:
+            if ev.is_env_dir(args.mv_to):
+                tty.die(
+                    "The new path corresponds to an existing environment;"
+                    " specify the --force flag to overwrite it."
+                )
+            if Path(args.mv_to).exists():
+                tty.die("The new path already exists; specify the --force flag to overwrite it.")
+        to_path = Path(args.mv_to)
+
+    # Name option being used
+    elif ev.exists(args.mv_from):
+        from_path = ev.environment.environment_dir_from_name(args.mv_from)
+        if not args.force and ev.exists(args.mv_to):
+            tty.die(
+                "The new name corresponds to an existing environment;"
+                " specify the --force flag to overwrite it."
+            )
+        to_path = ev.environment.root(args.mv_to)
+
+    # Neither
+    else:
+        tty.die("The specified name does not correspond to a managed spack environment")
+
+    # Guard against renaming from or to an active environment
+    active_env = ev.active_environment()
+    if active_env:
+        from_env = ev.Environment(from_path)
+        if from_env.path == active_env.path:
+            tty.die("Cannot rename active environment")
+        if to_path == active_env.path:
+            tty.die(f"{args.mv_to} is an active environment")
+
+    shutil.rmtree(to_path, ignore_errors=True)
+    fs.rename(from_path, to_path)
+    tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")
+
+
 #
 # env list
 #
 def env_list_setup_parser(subparser):
-    """list available environments"""
+    """list managed environments"""


 def env_list(args):
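Note: with `--force`, rename is implemented as remove-then-move: the destination is deleted before the source directory is renamed onto it. A toy filesystem illustration of that ordering (stdlib only; Spack's `fs.rename` behaves like a move here):

    import pathlib
    import shutil
    import tempfile

    root = pathlib.Path(tempfile.mkdtemp())
    src, dst = root / "old_env", root / "new_env"
    src.mkdir()
    (src / "spack.yaml").write_text("spack:\n  specs: []\n")
    dst.mkdir()  # pre-existing target, as when --force is given

    shutil.rmtree(dst, ignore_errors=True)  # mirrors shutil.rmtree(to_path, ...)
    src.rename(dst)                         # mirrors fs.rename(from_path, to_path)
    print(sorted(p.name for p in root.iterdir()))  # ['new_env']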
@@ -18,6 +18,7 @@
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
+import spack.repo
 import spack.util.environment
 from spack.cmd.common import arguments

@@ -152,9 +153,9 @@ def external_find(args):
 def packages_to_search_for(
     *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
 ):
-    result = []
-    for current_tag in tags:
-        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))
+    result = list(
+        {pkg for tag in tags for pkg in spack.repo.PATH.packages_with_tags(tag, full=True)}
+    )

     if names:
         # Match both fully qualified and unqualified
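Note: the rewrite is not just stylistic: a package carrying several of the requested tags used to be listed once per tag; building a set first de-duplicates. Toy illustration with a made-up tag index in place of `spack.repo.PATH`:

    TAG_INDEX = {"build-tools": ["cmake", "ninja"], "generators": ["cmake"]}

    def packages_with_tags(tag):
        return TAG_INDEX.get(tag, [])

    tags = ["build-tools", "generators"]

    old = []
    for tag in tags:
        old.extend(packages_with_tags(tag))

    new = list({pkg for tag in tags for pkg in packages_with_tags(tag)})

    print(old)          # ['cmake', 'ninja', 'cmake'] -- duplicate entry
    print(sorted(new))  # ['cmake', 'ninja']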
@@ -18,7 +18,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
+    arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
     subparser.add_argument(
         "-m",
         "--missing",
@@ -28,7 +28,7 @@ def setup_parser(subparser):
     subparser.add_argument(
         "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
     )
-    arguments.add_common_arguments(subparser, ["specs"])
+    arguments.add_concretizer_args(subparser)
     subparser.epilog = (
         "With an active environment, the specs "
         "parameter can be omitted. In this case all (uninstalled"
@@ -40,9 +40,6 @@ def fetch(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     if args.specs:
         specs = spack.cmd.parse_specs(args.specs, concretize=True)
     else:
@@ -14,6 +14,7 @@
 import spack.cmd as cmd
 import spack.environment as ev
 import spack.repo
+import spack.store
 from spack.cmd.common import arguments
 from spack.database import InstallStatuses

@@ -69,6 +70,12 @@ def setup_parser(subparser):

     arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

+    subparser.add_argument(
+        "-r",
+        "--only-roots",
+        action="store_true",
+        help="don't show full list of installed specs in an environment",
+    )
     subparser.add_argument(
         "-c",
         "--show-concretized",
@@ -140,6 +147,12 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
+    subparser.add_argument(
+        "--install-tree",
+        action="store",
+        default="all",
+        help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
+    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -168,6 +181,12 @@ def query_arguments(args):

     q_args = {"installed": installed, "known": known, "explicit": explicit}

+    install_tree = args.install_tree
+    upstreams = spack.config.get("upstreams", {})
+    if install_tree in upstreams.keys():
+        install_tree = upstreams[install_tree]["install_tree"]
+    q_args["install_tree"] = install_tree
+
     # Time window of installation
     for attribute in ("start_date", "end_date"):
         date = getattr(args, attribute)
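Note: `--install-tree` accepts either a keyword ('all', 'local', 'upstream'), a path, or the name of a configured upstream, which is resolved to that upstream's install tree. Resolution sketch (the upstream config below is hypothetical):

    upstreams = {"site": {"install_tree": "/opt/site/spack"}}  # hypothetical config

    def resolve_install_tree(value: str) -> str:
        if value in upstreams:
            return upstreams[value]["install_tree"]
        return value

    print(resolve_install_tree("site"))   # /opt/site/spack
    print(resolve_install_tree("local"))  # local (passed through unchanged)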
@@ -177,26 +196,22 @@ def query_arguments(args):
     return q_args


-def setup_env(env):
+def make_env_decorator(env):
     """Create a function for decorating specs when in an environment."""

-    def strip_build(seq):
-        return set(s.copy(deps=("link", "run")) for s in seq)
-
-    added = set(strip_build(env.added_specs()))
-    roots = set(strip_build(env.roots()))
-    removed = set(strip_build(env.removed_specs()))
+    roots = set(env.roots())
+    removed = set(env.removed_specs())

     def decorator(spec, fmt):
         # add +/-/* to show added/removed/root specs
         if any(spec.dag_hash() == r.dag_hash() for r in roots):
-            return color.colorize("@*{%s}" % fmt)
+            return color.colorize(f"@*{{{fmt}}}")
         elif spec in removed:
-            return color.colorize("@K{%s}" % fmt)
+            return color.colorize(f"@K{{{fmt}}}")
         else:
-            return "%s" % fmt
+            return fmt

-    return decorator, added, roots, removed
+    return decorator


 def display_env(env, args, decorator, results):
@@ -211,10 +226,54 @@ def display_env(env, args, decorator, results):
     """
     tty.msg("In environment %s" % env.name)

-    if not env.user_specs:
-        tty.msg("No root specs")
-    else:
-        tty.msg("Root specs")
+    num_roots = len(env.user_specs) or "No"
+    tty.msg(f"{num_roots} root specs")
+    concrete_specs = {
+        root: concrete_root
+        for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
+    }
+
+    def root_decorator(spec, string):
+        """Decorate root specs with their install status if needed"""
+        concrete = concrete_specs.get(spec)
+        if concrete:
+            status = color.colorize(concrete.install_status().value)
+            hash = concrete.dag_hash()
+        else:
+            status = color.colorize(spack.spec.InstallStatus.absent.value)
+            hash = "-" * 32

+        # TODO: status has two extra spaces on the end of it, but fixing this and other spec
+        # TODO: space format idiosyncrasies is complicated. Fix this eventually
+        status = status[:-2]
+
+        if args.long or args.very_long:
+            hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
+            return f"{status} {hash} {string}"
+        else:
+            return f"{status} {string}"
+
+    with spack.store.STORE.db.read_transaction():
+        cmd.display_specs(
+            env.user_specs,
+            args,
+            # these are overrides of CLI args
+            paths=False,
+            long=False,
+            very_long=False,
+            # these enforce details in the root specs to show what the user asked for
+            namespaces=True,
+            show_flags=True,
+            show_full_compiler=True,
+            decorator=root_decorator,
+            variants=True,
+        )
+
+    print()
+
+    if env.included_concrete_envs:
+        tty.msg("Included specs")
+
     # Root specs cannot be displayed with prefixes, since those are not
     # set for abstract specs. Same for hashes
@@ -224,10 +283,10 @@ def display_env(env, args, decorator, results):
     # Roots are displayed with variants, etc. so that we can see
     # specifically what the user asked for.
     cmd.display_specs(
-        env.user_specs,
+        env.included_user_specs,
         root_args,
         decorator=lambda s, f: color.colorize("@*{%s}" % f),
-        namespaces=True,
+        namespace=True,
         show_flags=True,
         show_full_compiler=True,
         variants=True,
@@ -242,7 +301,7 @@ def display_env(env, args, decorator, results):
     # Display a header for the installed packages section IF there are installed
     # packages. If there aren't any, we'll just end up printing "0 installed packages"
     # later.
-    if results:
+    if results and not args.only_roots:
         tty.msg("Installed packages")

@@ -251,9 +310,10 @@ def find(parser, args):
     results = args.specs(**q_args)

     env = ev.active_environment()
-    decorator = lambda s, f: f
-    if env:
-        decorator, _, roots, _ = setup_env(env)
+    if not env and args.only_roots:
+        tty.die("-r / --only-roots requires an active environment")
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f

     # use groups by default except with format.
     if args.groups is None:
@@ -280,9 +340,12 @@ def find(parser, args):
     if env:
         display_env(env, args, decorator, results)

-    cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+    count_suffix = " (not shown)"
+    if not args.only_roots:
+        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(results, pkg_type)
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
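Note: end-to-end, `--only-roots` suppresses the installed-specs listing and qualifies the final count instead. Toy version of the interplay:

    def summarize(results, only_roots: bool) -> str:
        count_suffix = " (not shown)"
        if not only_roots:
            # the full spec listing would be displayed here
            count_suffix = ""
        return f"{len(results)} installed packages{count_suffix}"

    print(summarize(range(12), only_roots=True))   # 12 installed packages (not shown)
    print(summarize(range(12), only_roots=False))  # 12 installed packages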
Some files were not shown because too many files have changed in this diff.