Compare commits: colorize-n...v0.22.3
784 commits
[Commit list: 784 commits in this range, from ba6cb62df8 through 53be280681; the author, message, and date columns were not captured in this page snapshot.]
.devcontainer/devcontainer.json (new normal file, 4 lines)

@@ -0,0 +1,4 @@
+{
+    "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
+    "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+}
.devcontainer/postCreateCommand.sh (new executable file, 20 lines)

@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Load spack environment at terminal startup
+cat <<EOF >> /root/.bashrc
+. /workspaces/spack/share/spack/setup-env.sh
+EOF
+
+# Load spack environment in this script
+. /workspaces/spack/share/spack/setup-env.sh
+
+# Ensure generic targets for maximum matching with buildcaches
+spack config --scope site add "packages:all:require:[target=x86_64_v3]"
+spack config --scope site add "concretizer:targets:granularity:generic"
+
+# Find compiler and install gcc-runtime
+spack compiler find --scope site
+
+# Setup buildcaches
+spack mirror add --scope site develop https://binaries.spack.io/develop
+spack buildcache keys --install --trust
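Once the container is created, the effect of this script can be checked from any shell inside it. A minimal sketch, assuming the `/workspaces/spack` checkout path used above:

```bash
# Hedged check of the devcontainer setup (paths assume the
# /workspaces/spack checkout used by postCreateCommand.sh).
. /workspaces/spack/share/spack/setup-env.sh
spack config get packages   # should show the target=x86_64_v3 requirement
spack mirror list           # should list the 'develop' binary mirror
spack spec zlib             # concretizes against the generic target
```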
.github/workflows/audit.yaml (42 lines changed, vendored)

@@ -17,33 +17,53 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
+    runs-on: ${{ matrix.system.os }}
     strategy:
       matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+        system:
+        - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
+        - { os: ubuntu-latest, shell: bash }
+        - { os: macos-latest, shell: bash }
+    defaults:
+      run:
+        shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
       run: |
        pip install --upgrade pip setuptools pytest coverage[toml]
+    - name: Setup for Windows run
+      if: runner.os == 'Windows'
+      run: |
+        python -m pip install --upgrade pywin32
     - name: Package audits (with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
+      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
-        coverage run $(which spack) audit externals
+        coverage run $(which spack) -d audit externals
         coverage combine
         coverage xml
     - name: Package audits (without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
+      if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
       run: |
-        . share/spack/setup-env.sh
-        $(which spack) audit packages
-        $(which spack) audit externals
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab  # @v2.1.0
+        . share/spack/setup-env.sh
+        spack -d audit packages
+        spack -d audit externals
+    - name: Package audits (without coverage)
+      if: ${{ runner.os == 'Windows' }}
+      run: |
+        . share/spack/setup-env.sh
+        spack -d audit packages
+        ./share/spack/qa/validate_last_exit.ps1
+        spack -d audit externals
+        ./share/spack/qa/validate_last_exit.ps1
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,audits
         token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
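The audit entry points exercised here are plain Spack commands, so the same checks can be run from a local checkout:

```bash
# Run the same audits the workflow runs (with debug output, as in the
# updated steps above).
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit externals
```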
.github/workflows/bootstrap-test.sh (5 lines changed, vendored)

@@ -1,7 +1,8 @@
 #!/bin/bash
-set -ex
+set -e
 source share/spack/setup-env.sh
 $PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack -d solve zlib
+$PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
+exit 0
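The script now takes its Spack flags from the environment instead of hard-coding `-d`. A hedged invocation sketch; the exported values are illustrative, and in the workflow the script is driven through `./bin/spack-tmpconfig -b`:

```bash
# Illustrative local invocation; PYTHON and SPACK_FLAGS are assumptions
# supplied by the caller, not fixed by the script. BOOTSTRAP is expected
# in the environment as well (assumption: provided by spack-tmpconfig).
export PYTHON=python3
export SPACK_FLAGS=-d   # restore the debug behavior the old script hard-coded
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
```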
.github/workflows/bootstrap.yml (344 lines changed, vendored)

@@ -13,118 +13,22 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  fedora-clingo-sources:
+  distros-clingo-sources:
     runs-on: ubuntu-latest
-    container: "fedora:latest"
+    container: ${{ matrix.image }}
+    strategy:
+      matrix:
+        image: ["fedora:latest", "opensuse/leap:latest"]
     steps:
-    - name: Install dependencies
+    - name: Setup Fedora
+      if: ${{ matrix.image == 'fedora:latest' }}
       run: |
         dnf install -y \
-          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+          bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - name: Setup non-root user
-      run: |
-        # See [1] below
-        git config --global --add safe.directory /__w/spack/spack
-        useradd spack-test && mkdir -p ~spack-test
-        chown -R spack-test . ~spack-test
-    - name: Setup repo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        git --version
-        . .github/workflows/setup_git.sh
-    - name: Bootstrap clingo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
-        spack external find cmake bison
-        spack -d solve zlib
-        tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-    - name: Install dependencies
-      env:
-        DEBIAN_FRONTEND: noninteractive
-      run: |
-        apt-get update -y && apt-get upgrade -y
-        apt-get install -y \
-          bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-          make patch unzip xz-utils python3 python3-dev tree \
-          cmake bison
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - name: Setup non-root user
-      run: |
-        # See [1] below
-        git config --global --add safe.directory /__w/spack/spack
-        useradd spack-test && mkdir -p ~spack-test
-        chown -R spack-test . ~spack-test
-    - name: Setup repo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        git --version
-        . .github/workflows/setup_git.sh
-    - name: Bootstrap clingo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
-        spack external find cmake bison
-        spack -d solve zlib
-        tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-binaries-and-patchelf:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-    - name: Install dependencies
-      env:
-        DEBIAN_FRONTEND: noninteractive
-      run: |
-        apt-get update -y && apt-get upgrade -y
-        apt-get install -y \
-          bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-          make patch unzip xz-utils python3 python3-dev tree
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - name: Setup non-root user
-      run: |
-        # See [1] below
-        git config --global --add safe.directory /__w/spack/spack
-        useradd spack-test && mkdir -p ~spack-test
-        chown -R spack-test . ~spack-test
-    - name: Setup repo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        git --version
-        . .github/workflows/setup_git.sh
-    - name: Bootstrap clingo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack -d solve zlib
-        tree ~/.spack/bootstrap/store/
-
-  opensuse-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "opensuse/leap:latest"
-    steps:
-    - name: Install dependencies
+    - name: Setup OpenSUSE
+      if: ${{ matrix.image == 'opensuse/leap:latest' }}
       run: |
         # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
         zypper update -y || zypper update -y
@@ -133,15 +37,9 @@
         make patch unzip which xz python3 python3-devel tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
     - name: Setup repo
       run: |
         # See [1] below
         git config --global --add safe.directory /__w/spack/spack
         git --version
         . .github/workflows/setup_git.sh
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
@@ -151,77 +49,98 @@
         spack -d solve zlib
         tree ~/.spack/bootstrap/store/
 
-  macos-clingo-sources:
-    runs-on: macos-latest
+  clingo-sources:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
-    - name: Install dependencies
+    - name: Setup macOS
+      if: ${{ matrix.runner != 'ubuntu-latest' }}
       run: |
-        brew install cmake bison@2.7 tree
+        brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: "3.12"
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
-        export PATH=/usr/local/opt/bison@2.7/bin:$PATH
         spack bootstrap disable github-actions-v0.5
         spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
         tree ~/.spack/bootstrap/store/
 
-  macos-clingo-binaries:
-    runs-on: ${{ matrix.macos-version }}
+  gnupg-sources:
+    runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        macos-version: ['macos-11', 'macos-12']
+        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
     steps:
-    - name: Install dependencies
+    - name: Setup macOS
+      if: ${{ matrix.runner != 'ubuntu-latest' }}
+      run: brew install tree gawk
+    - name: Remove system executables
       run: |
-        brew install tree
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-    - name: Bootstrap clingo
-      run: |
-        set -ex
-        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
-          not_found=1
-          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-          echo "Testing $ver_dir"
-          if [[ -d "$ver_dir" ]] ; then
-            if $ver_dir/python --version ; then
-              export PYTHON="$ver_dir/python"
-              not_found=0
-              old_path="$PATH"
-              export PATH="$ver_dir:$PATH"
-              ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
-              export PATH="$old_path"
-            fi
-          fi
-          # NOTE: test all pythons that exist, not all do on 12
+        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+          sudo rm $(command -v gpg gpg2 patchelf)
+        done
 
-  ubuntu-clingo-binaries:
-    runs-on: ubuntu-20.04
-    steps:
     - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - name: Setup repo
+    - name: Bootstrap GnuPG
       run: |
-        git --version
-        . .github/workflows/setup_git.sh
+        source share/spack/setup-env.sh
+        spack solve zlib
+        spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
+        spack -d gpg list
+        tree ~/.spack/bootstrap/store/
 
+  from-binaries:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+    steps:
+    - name: Setup macOS
+      if: ${{ matrix.runner != 'ubuntu-latest' }}
+      run: brew install tree
+    - name: Remove system executables
+      run: |
+        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+          sudo rm $(command -v gpg gpg2 patchelf)
+        done
+    - name: Checkout
+      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: |
+          3.8
+          3.9
+          3.10
+          3.11
+          3.12
+    - name: Set bootstrap sources
+      run: |
+        source share/spack/setup-env.sh
+        spack bootstrap disable github-actions-v0.4
+        spack bootstrap disable spack-install
     - name: Bootstrap clingo
       run: |
-        set -ex
-        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+        set -e
+        for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
           not_found=1
           ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
           echo "Testing $ver_dir"
           if [[ -d "$ver_dir" ]] ; then
-            echo "Testing $ver_dir"
             if $ver_dir/python --version ; then
               export PYTHON="$ver_dir/python"
               not_found=0
@@ -236,122 +155,9 @@
           exit 1
         fi
       done
 
-  ubuntu-gnupg-binaries:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-    - name: Install dependencies
-      env:
-        DEBIAN_FRONTEND: noninteractive
-      run: |
-        apt-get update -y && apt-get upgrade -y
-        apt-get install -y \
-          bzip2 curl file g++ gcc patchelf gfortran git gzip \
-          make patch unzip xz-utils python3 python3-dev tree
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - name: Setup non-root user
-      run: |
-        # See [1] below
-        git config --global --add safe.directory /__w/spack/spack
-        useradd spack-test && mkdir -p ~spack-test
-        chown -R spack-test . ~spack-test
-    - name: Setup repo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        git --version
-        . .github/workflows/setup_git.sh
-    - name: Bootstrap GnuPG
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.4
-        spack bootstrap disable spack-install
-        spack -d gpg list
-        tree ~/.spack/bootstrap/store/
-
-  ubuntu-gnupg-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-    - name: Install dependencies
-      env:
-        DEBIAN_FRONTEND: noninteractive
-      run: |
-        apt-get update -y && apt-get upgrade -y
-        apt-get install -y \
-          bzip2 curl file g++ gcc patchelf gfortran git gzip \
-          make patch unzip xz-utils python3 python3-dev tree \
-          gawk
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - name: Setup non-root user
-      run: |
-        # See [1] below
-        git config --global --add safe.directory /__w/spack/spack
-        useradd spack-test && mkdir -p ~spack-test
-        chown -R spack-test . ~spack-test
-    - name: Setup repo
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        git --version
-        . .github/workflows/setup_git.sh
-    - name: Bootstrap GnuPG
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack solve zlib
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
-        spack -d gpg list
-        tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-binaries:
-    runs-on: macos-latest
-    steps:
-    - name: Install dependencies
-      run: |
-        brew install tree
-        # Remove GnuPG since we want to bootstrap it
-        sudo rm -rf /usr/local/bin/gpg
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-    - name: Bootstrap GnuPG
-      run: |
-        source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.4
-        spack bootstrap disable spack-install
-        spack -d gpg list
-        tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-sources:
-    runs-on: macos-latest
-    steps:
-    - name: Install dependencies
-      run: |
-        brew install gawk tree
-        # Remove GnuPG since we want to bootstrap it
-        sudo rm -rf /usr/local/bin/gpg
-    - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-    - name: Bootstrap GnuPG
-      run: |
-        source share/spack/setup-env.sh
-        spack solve zlib
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
-        spack -d gpg list
-        tree ~/.spack/bootstrap/store/
-
 
 # [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
 # introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
 # See:
 # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
 # - https://github.com/actions/checkout/issues/760
 # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
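The GnuPG bootstrap exercised by the `gnupg-sources` and `from-binaries` jobs can be reproduced outside CI. A minimal sketch following the steps above, run from a Spack checkout; it assumes a clean `~/.spack/bootstrap` and that no system `gpg` shadows the bootstrapped one:

```bash
# Reproduce the GnuPG bootstrap check locally (sketch under the
# assumptions stated above).
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list                 # triggers the bootstrap
tree ~/.spack/bootstrap/store/    # inspect what was installed
```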
.github/workflows/build-containers.yml (26 lines changed, vendored)

@@ -45,17 +45,18 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+          [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
 
     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
@@ -87,16 +88,16 @@ jobs:
         fi
 
     - name: Upload Dockerfile
-      uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+      uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
       with:
-        name: dockerfiles
+        name: dockerfiles_${{ matrix.dockerfile[0] }}
        path: dockerfiles
 
     - name: Set up QEMU
       uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
 
     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
+      uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
 
     - name: Log in to GitHub Container Registry
       uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -113,10 +114,21 @@ jobs:
         password: ${{ secrets.DOCKERHUB_TOKEN }}
 
     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@af5a7ed5ba88268d5278f7203fb52cd833f66d6e
+      uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
         push: ${{ github.event_name != 'pull_request' }}
         tags: ${{ steps.docker_meta.outputs.tags }}
         labels: ${{ steps.docker_meta.outputs.labels }}
 
+  merge-dockerfiles:
+    runs-on: ubuntu-latest
+    needs: deploy-images
+    steps:
+    - name: Merge Artifacts
+      uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
+      with:
+        name: dockerfiles
+        pattern: dockerfiles_*
+        delete-merged: true
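The Build & Deploy step wraps `docker buildx`. Outside the action, a roughly equivalent hedged invocation for one matrix entry would look like this (the image tag is illustrative, not the workflow's computed tag):

```bash
# Rough local equivalent of the Build & Deploy step for the new
# ubuntu-noble matrix entry; tag and push destination are assumptions.
docker buildx build \
  --platform linux/amd64,linux/arm64,linux/ppc64le \
  --tag spack/ubuntu-noble:latest \
  --push \
  dockerfiles/ubuntu-noble
```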
.github/workflows/ci.yaml (11 lines changed, vendored)

@@ -18,6 +18,7 @@ jobs:
   prechecks:
     needs: [ changes ]
     uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
   all-prechecks:
@@ -35,7 +36,7 @@
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -70,16 +71,14 @@
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
+    secrets: inherit
   unit-tests:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
-  windows:
-    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
-    needs: [ prechecks ]
-    uses: ./.github/workflows/windows_python.yml
     secrets: inherit
   all:
-    needs: [ windows, unit-tests, bootstrap ]
+    needs: [ unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
.github/workflows/nightly-win-builds.yml (4 lines changed, vendored)

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: 3.9
     - name: Install Python packages
.github/workflows/style/requirements.txt (2 lines changed, vendored)

@@ -1,4 +1,4 @@
-black==24.3.0
+black==24.4.2
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
.github/workflows/unit_tests.yaml (68 lines changed, vendored)

@@ -51,10 +51,10 @@ jobs:
         on_develop: false
 
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -91,17 +91,19 @@
         UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
+        token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
   # Test shell integration
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -122,9 +124,11 @@
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: shelltests,linux
+        token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
 
   # Test RHEL8 UBI with platform Python. This job is run
   # only on PRs modifying core Spack
@@ -137,7 +141,7 @@
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
     - name: Setup repo and non-root user
       run: |
         git --version
@@ -156,10 +160,10 @@
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -181,20 +185,23 @@
         SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab  # @v2.1.0
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,linux,clingo
+        token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
   # Run unit tests on MacOS
   macos:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
+        os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c  # @v2
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -216,6 +223,39 @@
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,macos
         token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
+  # Run unit tests on Windows
+  windows:
+    defaults:
+      run:
+        shell:
+          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+    - name: Create local develop
+      run: |
+        ./.github/workflows/setup_git.ps1
+    - name: Unit Test
+      run: |
+        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
+        ./share/spack/qa/validate_last_exit.ps1
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      with:
+        flags: unittests,windows
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
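The suites invoked here are ordinary scripts in the repository, so they can be run outside CI. A hedged sketch using the entry points and environment variables visible in this diff (the values are illustrative, not CI configuration):

```bash
# Run Spack's unit and shell test suites locally, roughly as the
# jobs above do.
. share/spack/setup-env.sh
SPACK_TEST_SOLVER=clingo share/spack/qa/run-unit-tests
COVERAGE=true share/spack/qa/run-shell-tests
```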
.github/workflows/valid-style.yml (11 lines changed, vendored)

@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -35,10 +35,10 @@
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -56,6 +56,7 @@
         share/spack/qa/run-style-tests
   audit:
     uses: ./.github/workflows/audit.yaml
+    secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.11'
@@ -69,7 +70,7 @@
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633  # @v2
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
     - name: Setup repo and non-root user
       run: |
         git --version
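The style job's checks can be reproduced locally against the same pinned tool versions. A sketch; a virtualenv is assumed but not required:

```bash
# Install the pinned style tools and run the style suite, mirroring
# the 'style' job above.
python3 -m pip install -r .github/workflows/style/requirements.txt
share/spack/qa/run-style-tests
```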
.github/workflows/windows_python.yml (file deleted, 79 lines, vendored)

@@ -1,79 +0,0 @@
-name: windows
-
-on:
-  workflow_call:
-
-concurrency:
-  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-jobs:
-  unit-tests:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/setup_git.ps1
-    - name: Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
-      with:
-        flags: unittests,windows
-  unit-tests-cmd:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/setup_git.ps1
-    - name: Command Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
-      with:
-        flags: unittests,windows
-  build-abseil:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage
-    - name: Build Test
-      run: |
-        spack compiler find
-        spack -d external find cmake ninja
-        spack -d install abseil-cpp
(file header not captured for this hunk)

@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
   - requirements: lib/spack/docs/requirements.txt
+
+search:
+  ranking:
+    spack.html: -10
+    spack.*.html: -10
+    llnl.html: -10
+    llnl.*.html: -10
+    _modules/*: -10
+    command_index.html: -9
+    basic_usage.html: 5
+    configuration.html: 5
+    config_yaml.html: 5
+    packages_yaml.html: 5
+    build_settings.html: 5
+    environments.html: 5
+    containers.html: 5
+    mirrors.html: 5
+    module_file_support.html: 5
+    repositories.html: 5
+    binary_caches.html: 5
+    chain.html: 5
+    pipelines.html: 5
+    packaging_guide.html: 5
458
CHANGELOG.md
458
CHANGELOG.md
@@ -1,3 +1,461 @@
# v0.22.3 (2024-11-18)

## Bugfixes
- Forward compatibility with Python 3.13 (#46775, #46983, #47035, #47175)
- `archspec` was updated to v0.2.5 (#46503, #46958)
- Fix path to Spack in `spack env depfile` makefile (#46966)
- Fix `glibc` detection in Chinese locales (#47434)
- Fix pickle round-trip of specs propagating variants (#47351)
- Fix a bug where concurrent spack install commands would not always update explicits correctly
  (#47358)
- Fix a bug where autopush would run before all post install hooks modifying the install prefix
  had run (#47329)
- Fix `spack find -u` (#47102)
- Fix a bug where sometimes the wrong Python interpreter was used for build dependencies such as
  `py-setuptools` (#46980)
- Fix default config errors found by `spack audit externals` (#47308)
- Fix duplicate printing of external roots in installer (#44917)
- Fix modules schema in `compilers.yaml` (#47197)
- Reduce the size of generated YAML for GitLab CI (#44995)
- Handle missing metadata file gracefully in bootstrap (#47278)
- Show underlying errors on fetch failure (#45714)
- Recognize `.` and `..` as paths instead of names in buildcache commands (#47105)
- Documentation and style (#46991, #47107, #47110, #47111, #47346, #47307, #47309, #47328, #47160,
  #47402, #47557, #46709, #47080)
- Tests and CI fixes (#47165, #46711)

## Package updates
- ffmpeg: fix hash of patch (#45574)

# v0.22.2 (2024-09-21)

## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
- Support macOS Sequoia (#45018, #45127)
- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
- Ensure shell escaping of environment variable values in load and activate commands (#42780)
- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
- Do not halt concretization on unknown variants in externals (#45326)
- Improve validation of `develop` config section (#46485)
- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
- Improve backwards compatibility in `include_concrete` (#45766)
- Fix issue where package tags were sometimes repeated (#45160)
- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
- Remove debug statements in package hash computation (#45235)
- Remove redundant clingo warnings (#45269)
- Remove hard-coded layout version (#45645)
- Do not initialize previous store state in `use_store` (#45268)
- Docs improvements (#46475)

## Package updates
- `chapel` major update (#42197, #44931, #45304)

# v0.22.1 (2024-07-04)

## Bugfixes
- Fix reuse of externals on Linux (#44316)
- Ensure parent gcc-runtime version >= child (#44834, #44870)
- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
- Improve version detection of glibc (#44154)
- Improve heuristics for solver (#44893, #44976, #45023)
- Make strong preferences override reuse (#44373)
- Reduce verbosity when C compiler is missing (#44182)
- Make missing ccache executable an error when required (#44740)
- Make every environment view containing `python` a `venv` (#44382)
- Fix external detection for compilers with os but no target (#44156)
- Fix version optimization for roots (#44272)
- Handle common implementations of pagination of tags in OCI build caches (#43136)
- Apply fetched patches to develop specs (#44950)
- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
- Fix issue with long filenames in build caches on Windows (#43851)
- Fix formatting issue in `spack audit` (#45045)
- CI fixes (#44582, #43965, #43967, #44279, #44213)

## Package updates
- protobuf: fix 3.4:3.21 patch checksum (#44443)
- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
- Remove mesa18 and libosmesa (#44264)
- Enforce consistency of `gl` providers (#44307)
- Require libiconv for iconv (#44335, #45026).
  Notice that glibc/musl also provide iconv, but are not guaranteed to be
  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
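
  For example, restoring the old behavior is a small `packages.yaml` entry
  (a minimal sketch of the setting named in this item):

  ```yaml
  packages:
    iconv:
      require: [glibc]
  ```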
- py-matplotlib: qualify when to do a post install (#44191)
- rust: fix v1.78.0 instructions (#44127)
- suite-sparse: improve setting of the `libs` property (#44214)
- netlib-lapack: provide blas and lapack together (#44981)


# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release

1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number
   of changes in `v0.22` support that effort. You may notice nodes in your dependency
   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
   may notice that Spack graphs now include `libc`. We've also begun moving compiler
   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
   other externals. We are trying to do this with the least disruption possible, so
   your existing `compilers.yaml` files should still work. We expect to be done with
   this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
     libraries. This enables gcc runtime libraries to be installed and relocated when
     using a build cache. When building minimal Spack-generated container images it is
     no longer necessary to install libgfortran, libgomp etc. using the system package
     manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package
     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
     provide virtuals and compilers can inject dependencies on virtuals into compiled
     packages. This allows us to model library soname compatibility and allows
     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
     provided by standalone libraries). Note that until we have an agreement in place
     with intel, Intel packages are marked `redistribute(source=False, binary=False)`
     and must be downloaded outside of Spack.

   * #43272: changes to the optimization criteria of the solver improve the hit-rate of
     buildcaches by a fair amount. The solver uses more relaxed compatibility rules and
     will not try to strictly match compilers or targets of reused specs. Users can
     still enforce the previous strict behavior with `require:` sections in
     `packages.yaml`. Note that to enforce correct linking, Spack will *not* reuse old
     `%gcc` and `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
     configured in `compilers.yaml`. Because we can now keep runtime libraries in build
     cache, we do not require you to also have a local configured compiler to *use* the
     runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
     updates that happen underneath Spack.

   * #43190: binary compatibility on `linux` is now based on the `libc` version,
     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
     `musl`) and add it as an implicit external node in the dependency graph. Binaries
     with a `libc` with the same name and a version less than or equal to that of the
     detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.

   * #43464: each package that can provide a compiler is now detectable using `spack
     external find`. External packages defining compiler paths are effectively used as
     compilers, and `spack external find -t compiler` can be used as a substitute for
     `spack compiler find`. More details on this transition are in
     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)

2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What are the roots
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more
   clearly. Installation status is shown next to each root, so you can see what is
   installed. Roots are also shown in bold in the list of installed packages. There is
   also a new option for `spack find -r` / `--only-roots` that will only show env
   roots, if you don't want to look at all the installed specs.

   More details in #42334.

3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to
   respect shell quoting instead of trying to fight it. If you (sadly) had to write
   something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   That will now result in an error, but you can now write what you probably expected
   to work in the first place:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can now also include special characters, so you can supply flags like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
   and `==` for flags or variants. This used to work, but will now result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.

4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
   attempt to build a new version of the requested package / any environment roots.
   While this is a common ask for *upgrading* and for *developer* workflows, we don't
   think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
      is a known security issue that necessitates an upgrade.

   With the install command you now have three options:

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it
   clearer that it may give you fresh versions. More details in #41302 and #43988.
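
   As a quick illustration (`hdf5` is just an example package):

   ```console
   $ spack install hdf5                # default --reuse: prefer installed packages
   $ spack install --fresh-roots hdf5  # fresh root, reuse dependencies where possible
   $ spack install --fresh hdf5        # fresh root and fresh dependencies
   ```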

5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new
   `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`,
     `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```

6. **New `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit
   way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.

7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through
   some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]    # conflict on %clang
       - any_of: ["+shared", "@:"]   # strong preference for +shared
   ```

   You can now use `conflict:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
   and #41832 for more details.

8. **`include_concrete` in environments**

   You may want to build on the *concrete* contents of another environment without
   changing that environment. You can now include the concrete specs from another
   environment's `spack.lock` with `include_concrete`:

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /path/to/environment1
     - /path/to/environment2
   ```

   Now, when *this* environment is concretized, it will bring in the already concrete
   specs from `environment1` and `environment2`, and build on top of them without
   changing them. This is useful if you have phased deployments, where old deployments
   should not be modified but you want to use as many of them as possible. More details
   in #33768.

9. **`python-venv` isolation**

   Spack has unique requirements for Python because it:
   1. installs every package in its own independent directory, and
   2. allows users to register *external* python installations.

   External installations may contain their own installed packages that can interfere
   with Spack installations, and some distributions (Debian and Ubuntu) even change the
   `sysconfig` in ways that alter the installation layout of installed Python packages
   (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
   from these and other issues, we now insert a small `python-venv` package in between
   `python` and packages that need to install Python code. This isolates Spack's build
   environment, isolates Spack from any issues with an external python, and resolves a
   large number of issues we've had with Python installations.

   See #40773 for further details.
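
   A quick way to see the new node is to inspect a concretized Python extension
   (exact output depends on your configuration):

   ```console
   $ spack spec py-numpy | grep python-venv
   ```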

## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)

## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* Avoid `jinja2` import at startup unless needed (#43237)

## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
  `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)

## Binary caches
* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack

## Removals, deprecations, and syntax changes
* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)

## Notable Bugfixes
* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core


# v0.21.2 (2024-03-01)

## Bugfixes

- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
  - only push in parallel when forking (#42143)
  - use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)

## Package updates
- rust: add v1.75, rework a few variants (#41161, #41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)

# v0.21.1 (2024-01-11)

## New features
- Add support for reading buildcaches created by Spack v0.22 (#41773)

## Bugfixes

- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215, #41369, #41495, #41359, #41361, #41345, #41342, #41308, #41226)

## Package updates
- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)

# v0.21.0 (2023-11-11)

`v0.21.0` is a major feature release.
@@ -88,7 +88,7 @@ Resources:
  [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
  for Q&A and discussions. Note the pinned discussions for announcements.
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
  `@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
  only for announcements. Please use other venues for discussions.
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
    "unload" {Invoke-SpackLoad}
    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}

exit $LASTEXITCODE
@@ -15,7 +15,7 @@ concretizer:
  # as possible, rather than building. If `false`, we'll always give you a fresh
  # concretization. If `dependencies`, we'll only reuse dependencies but
  # give you a fresh concretization for your root specs.
  reuse: dependencies
  reuse: true
  # Options that tune which targets are considered for concretization. The
  # concretization process is very sensitive to the number of targets, and the time
  # needed to reach a solution increases noticeably with the number of targets
@@ -42,3 +42,8 @@ concretizer:
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
  # Option to specify compatibility between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
  os_compatible: {}
@@ -101,6 +101,12 @@ config:
  verify_ssl: true


  # This is where custom certs for proxy/firewall are stored.
  # It can be a path or environment variable. To match ssl env configuration
  # the default is the environment variable SSL_CERT_FILE
  ssl_certs: $SSL_CERT_FILE


  # Suppress gpg warnings from binary package verification
  # Only suppresses warnings, gpg failure will still fail the install
  # Potential rationale to set True: users have already explicitly trusted the
@@ -19,7 +19,6 @@ packages:
    - apple-clang
    - clang
    - gcc
    - intel
    providers:
      elf: [libelf]
      fuse: [macfuse]

etc/spack/defaults/linux/packages.yaml (new file, +3)
@@ -0,0 +1,3 @@
packages:
  iconv:
    require: [libiconv]
@@ -15,15 +15,17 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
    providers:
      awk: [gawk]
      armci: [armcimpi]
      blas: [openblas, amdblis]
      D: [ldc]
      daal: [intel-oneapi-daal]
      elf: [elfutils]
      fftw-api: [fftw, amdfftw]
      flame: [libflame, amdlibflame]
      fortran-rt: [gcc-runtime, intel-oneapi-runtime]
      fuse: [libfuse]
      gl: [glx, osmesa]
      glu: [mesa-glu, openglu]
@@ -34,9 +36,11 @@ packages:
      java: [openjdk, jdk, ibm-java]
      jpeg: [libjpeg-turbo, libjpeg]
      lapack: [openblas, amdlibflame]
      libglx: [mesa+glx, mesa18+glx]
      libc: [glibc, musl]
      libgfortran: [gcc-runtime]
      libglx: [mesa+glx]
      libifcore: [intel-oneapi-runtime]
      libllvm: [llvm]
      libosmesa: [mesa+osmesa, mesa18+osmesa]
      lua-lang: [lua, lua-luajit-openresty, lua-luajit]
      luajit: [lua-luajit-openresty, lua-luajit]
      mariadb-client: [mariadb-c-client, mariadb]
lib/spack/docs/_templates/layout.html (new file, vendored, +12)
@@ -0,0 +1,12 @@
{% extends "!layout.html" %}

{%- block extrahead %}
<!-- Google tag (gtag.js) -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
<script>
  window.dataLayer = window.dataLayer || [];
  function gtag(){dataLayer.push(arguments);}
  gtag('js', new Date());
  gtag('config', 'G-S0PQ7WV75K');
</script>
{% endblock %}
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
installed them. As you've seen, Spack packages are installed into long
paths with hashes, and you need a way to get them into your path. The
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
described in the next section.
described in this section.

Some more advanced ways to use Spack packages include:
@@ -959,7 +959,86 @@ use ``spack find --loaded``.
You can also use ``spack load --list`` to get the same output, but it
does not have the full set of query options that ``spack find`` offers.

We'll learn more about Spack's spec syntax in the next section.
We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.


.. _extensions:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Python packages and virtual environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack can install a large number of Python packages. Their names are
typically prefixed with ``py-``. Installing and using them is no
different from any other package:

.. code-block:: console

   $ spack install py-numpy
   $ spack load py-numpy
   $ python3
   >>> import numpy

The ``spack load`` command sets the ``PATH`` variable so that the right Python
executable is used, and makes sure that ``numpy`` and its dependencies can be
located in the ``PYTHONPATH``.

Spack is different from other Python package managers in that it installs
every package into its *own* prefix. This is in contrast to ``pip``, which
installs all packages into the same prefix, be it in a virtual environment
or not.

For many users, **virtual environments** are more convenient than repeated
``spack load`` commands, particularly when working with multiple Python
packages. Fortunately, Spack supports environments itself, which together
with a view are no different from Python virtual environments.

The recommended way of working with Python extensions such as ``py-numpy``
is through :ref:`Environments <environments>`. The following example creates
a Spack environment with ``numpy`` in the current working directory. It also
puts a filesystem view in ``./view``, which is a more traditional combined
prefix for all packages in the environment.

.. code-block:: console

   $ spack env create --with-view view --dir .
   $ spack -e . add py-numpy
   $ spack -e . concretize
   $ spack -e . install

Now you can activate the environment and start using the packages:

.. code-block:: console

   $ spack env activate .
   $ python3
   >>> import numpy

The environment view is also a virtual environment, which is useful if you are
sharing the environment with others who are unfamiliar with Spack. They can
either use the Python executable directly:

.. code-block:: console

   $ ./view/bin/python3
   >>> import numpy

or use the activation script:

.. code-block:: console

   $ source ./view/bin/activate
   $ python3
   >>> import numpy

In general, there should not be much difference between ``spack env activate``
and using the virtual environment. The main advantage of ``spack env activate``
is that it knows about more packages than just Python packages, and it may set
additional runtime variables that are not covered by the virtual environment
activation script.

See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.


.. _sec-specs:
@@ -1119,6 +1198,9 @@ and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
includes any version with major version ``3``.

Versions are ordered lexicographically by their components. For more details
on the order, see :ref:`the packaging guide <version-comparison>`.

Notice that you can distinguish between the specific version ``@=3.2`` and
the range ``@3.2``. This is useful for packages that follow a versioning
scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
@@ -1702,165 +1784,6 @@ check only local packages (as opposed to those used transparently from
check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.


.. _extensions:

---------------------------
Extensions & Python support
---------------------------

Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.

In Spack, installation prefixes are immutable, so this type of installation
is not directly supported. However, it is possible to create views that
allow you to merge install prefixes of multiple packages into a single new prefix.
Views are a convenient way to get a more traditional filesystem structure.
Using *extensions*, you can ensure that Python packages always share the
same prefix in the view as Python itself. Suppose you have
Python installed like so:

.. code-block:: console

   $ spack find python
   ==> 1 installed packages.
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   python@2.7.8

.. _cmd-spack-extensions:

^^^^^^^^^^^^^^^^^^^^
``spack extensions``
^^^^^^^^^^^^^^^^^^^^

You can find extensions for your Python installation like this:

.. code-block:: console

   $ spack extensions python
   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
   ==> 36 extensions:
   geos          py-ipython     py-pexpect    py-pyside            py-sip
   py-basemap    py-libxml2     py-pil        py-pytz              py-six
   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
   py-epydoc     py-mx          py-pylint     py-scipy
   py-gnuplot    py-nose        py-pyparsing  py-setuptools
   py-h5py       py-numpy       py-pyqt       py-shiboken

   ==> 12 installed:
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0

The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:

.. code-block:: console

   $ spack find --paths py-numpy
   ==> 1 installed packages.
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-numpy@1.9.1  ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244

However, even though this package is installed, you cannot use it
directly when you run ``python``:

.. code-block:: console

   $ spack load python
   $ python
   Python 2.7.8 (default, Feb 17 2015, 01:35:25)
   [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
   Type "help", "copyright", "credits" or "license" for more information.
   >>> import numpy
   Traceback (most recent call last):
     File "<stdin>", line 1, in <module>
   ImportError: No module named numpy
   >>>

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Extensions in Environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The recommended way of working with extensions such as ``py-numpy``
above is through :ref:`Environments <environments>`. For example,
the following creates an environment in the current working directory
with a filesystem view in the ``./view`` directory:

.. code-block:: console

   $ spack env create --with-view view --dir .
   $ spack -e . add py-numpy
   $ spack -e . concretize
   $ spack -e . install

We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):

.. code-block:: console

   $ spack env activate .

which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that

.. code-block:: console

   $ python
   >>> import numpy

works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:

.. code-block:: console

   $ ./view/bin/python
   >>> import numpy

See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.

^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^

A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:

.. code-block:: console

   $ spack load py-numpy
   $ python
   >>> import numpy

The loaded packages can be checked using ``spack find --loaded``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Apart from ``spack env activate`` and ``spack load``, you can load numpy
through your environment modules (using ``environment-modules`` or
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
your current shell.

.. code-block:: console

   $ module load <name of numpy module>

If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.
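
For example (a sketch; ``py-numpy`` stands in for whatever spec you loaded):

.. code-block:: console

   $ spack module tcl loads py-numpy

This prints the ``module load`` lines for the spec, ready to paste into your shell.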

-----------------------
Filesystem requirements
-----------------------
@@ -220,6 +220,40 @@ section of the configuration:

.. _binary_caches_oci:

-------------------------------
Automatic push to a build cache
-------------------------------

Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting the autopush flag when adding a mirror:

.. code-block:: console

   $ spack mirror add --autopush <name> <url or path>

Or the autopush flag can be set for an existing mirror:

.. code-block:: console

   $ spack mirror set --autopush <name>      # enable automatic push for an existing mirror
   $ spack mirror set --no-autopush <name>   # disable automatic push for an existing mirror

Then, after installing a package, it is automatically pushed to all mirrors with ``autopush: true``. The command

.. code-block:: console

   $ spack install <package>

will have the same effect as

.. code-block:: console

   $ spack install <package>
   $ spack buildcache push <cache> <package>  # for all caches with autopush: true

.. note::

   Packages are automatically pushed to a build cache only if they are built from source.

-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------
@@ -21,23 +21,86 @@ is the following:
Reuse already installed packages
--------------------------------

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
attribute can either be a single value, or an object for more complex configurations.

In the former case ("single value") it allows Spack to:

1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
3. Disregard reusing installed packages and buildcaches, when ``false``
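
A minimal single-value configuration in ``concretizer.yaml`` would look like this (a sketch; any of the three values above may be used):

.. code-block:: yaml

   concretizer:
     reuse: dependencies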

In case a finer control over which specs are reused is needed, then the value of this attribute can be
an object, with the following keys:

1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
2. ``from``: list of sources from which reused specs are taken

Each source in ``from`` is itself an object:

.. list-table:: Attributes for a source of reusable specs
   :header-rows: 1

   * - Attribute name
     - Description
   * - type (mandatory, string)
     - Can be ``local``, ``buildcache``, or ``external``
   * - include (optional, list of specs)
     - If present, reusable specs must match at least one of the constraints in the list
   * - exclude (optional, list of specs)
     - If present, reusable specs must not match any of the constraints in the list.

For instance, the following configuration:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         include:
         - "%gcc"
         - "%clang"

tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang`` that are installed
in the local store. Any spec from remote buildcaches is disregarded.

To reduce the boilerplate in configuration files, default values for the ``include`` and
``exclude`` options can be pushed up one level:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
       from:
       - type: local
       - type: buildcache
       - type: local
         include:
         - "foo %oneapi"

In the example above we reuse all specs compiled with ``gcc`` from the local store
and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
specs overrides the default ``include`` attribute.

For one-off concretizations, there are command-line arguments for each of the simple "single value"
configurations. This means a user can run:

.. code-block:: console

   % spack install --reuse <spec>

to enable reuse for a single installation, and you can use:
to enable reuse for a single installation, or:

.. code-block:: console

   % spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: dependencies`` is the default.

.. seealso::
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

.. code-block:: python

   generator = "Ninja"
   generator("ninja")


``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -718,23 +718,45 @@ command-line tool, or C/C++/Fortran program with optional Python
modules? The former should be prepended with ``py-``, while the
latter should not.

""""""""""""""""""""""
extends vs. depends_on
""""""""""""""""""""""
""""""""""""""""""""""""""""""
``extends`` vs. ``depends_on``
""""""""""""""""""""""""""""""

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of
thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
Additionally, ``extends("python")`` adds a dependency on the package
``python-venv``. This improves isolation from the system, whether
it's during the build or at runtime: user and system site packages
cannot accidentally be used by any package that ``extends("python")``.

As a rule of thumb: if a package does not install any Python modules
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.
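
As an illustration, a package following these rules might contain (``PyExample`` is a hypothetical package, not one from the Spack repository):

.. code-block:: python

   class PyExample(PythonPackage):
       """Hypothetical extension that installs modules into site-packages."""

       # modules land in <prefix>/lib/pythonX.Y/site-packages, so use extends;
       # this also adds the python-venv dependency described above
       extends("python")

       # a build-only dependency, expressed as usual
       depends_on("py-setuptools", type="build")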

"""""""""""""""""""""""""""""""""""""
Executing ``python`` during the build
"""""""""""""""""""""""""""""""""""""

Whenever you need to execute a Python command or pass the path of the
Python interpreter to the build system, it is best to use the global
variable ``python`` directly. For example:

.. code-block:: python

   @run_before("install")
   def recythonize(self):
       python("setup.py", "clean")  # use the `python` global

As mentioned in the previous section, ``extends("python")`` adds an
automatic dependency on ``python-venv``, which is a virtual environment
that guarantees build isolation. The ``python`` global always refers to
the correct Python interpreter, whether the package uses ``extends("python")``
or ``depends_on("python")``.

^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack
^^^^^^^^^^^^^^^^^^^^^
@@ -5,9 +5,9 @@

.. chain:

============================
Chaining Spack Installations
============================
=============================================
Chaining Spack Installations (upstreams.yaml)
=============================================

You can point your Spack installation to another installation to use any
packages that are installed there. To register the other Spack instance,
@@ -145,6 +145,25 @@ hosts when making ``ssl`` connections. Set to ``false`` to disable, and
tools like ``curl`` will use their ``--insecure`` options. Disabling
this can expose you to attacks. Use at your own risk.

--------------------
``ssl_certs``
--------------------

Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.
When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
in the subprocess calling ``curl``.
If ``url_fetch_method:urllib`` then files and directories are supported, i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
Certificates relative to an environment can be created by prepending the path variable
with the Spack configuration variable ``$env``.
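
For example, pointing Spack at a custom CA bundle is a single setting in ``config.yaml`` (the path below is illustrative):

.. code-block:: yaml

   config:
     ssl_certs: /etc/ssl/certs/ca-bundle.crt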

--------------------
``checksum``
--------------------
@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
   * - Operating System
     - Base Image
     - Spack Image
   * - Ubuntu 18.04
     - ``ubuntu:18.04``
     - ``spack/ubuntu-bionic``
   * - Ubuntu 20.04
     - ``ubuntu:20.04``
     - ``spack/ubuntu-focal``
   * - Ubuntu 22.04
     - ``ubuntu:22.04``
     - ``spack/ubuntu-jammy``
   * - Ubuntu 24.04
     - ``ubuntu:24.04``
     - ``spack/ubuntu-noble``
   * - CentOS 7
     - ``centos:7``
     - ``spack/centos7``
@@ -227,12 +227,12 @@ The OS that are currently supported are summarized in the table below:
   * - Rocky Linux 9
     - ``rockylinux:9``
     - ``spack/rockylinux9``
   * - Fedora Linux 37
     - ``fedora:37``
     - ``spack/fedora37``
   * - Fedora Linux 38
     - ``fedora:38``
     - ``spack/fedora38``
   * - Fedora Linux 39
     - ``fedora:39``
     - ``spack/fedora39``
   * - Fedora Linux 40
     - ``fedora:40``
     - ``spack/fedora40``
@@ -184,7 +184,7 @@ Style Tests

Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -552,11 +552,11 @@ With either interpreter you can run a single command:

.. code-block:: console

   $ spack python -c 'import distro; distro.linux_distribution()'
   ('Ubuntu', '18.04', 'Bionic Beaver')
   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
   ...

   $ spack python -i ipython -c 'import distro; distro.linux_distribution()'
   Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
   Out[1]: ...

or a file:
@@ -716,27 +716,27 @@ Release branches
^^^^^^^^^^^^^^^^

There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
diagram of how Spack release branches work::

   o    branch: develop  (latest version, v0.19.0.dev0)
   o    branch: develop  (latest version, v0.23.0.dev0)
   |
   o
   | o  branch: releases/v0.18, tag: v0.18.1
   | o  branch: releases/v0.22, tag: v0.22.1
   o |
   | o  tag: v0.18.0
   | o  tag: v0.22.0
   o |
   | o
   |/
   o
   |
   o
   | o  branch: releases/v0.17, tag: v0.17.2
   | o  branch: releases/v0.21, tag: v0.21.2
   o |
   | o  tag: v0.17.1
   | o  tag: v0.21.1
   o |
   | o  tag: v0.17.0
   | o  tag: v0.21.0
   o |
   | o
   |/
@@ -747,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix.

Each Spack release series also has a corresponding branch, e.g.
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
@@ -778,27 +778,40 @@ for more details.
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/projects>`_. At any time, there may be
several open release projects. For example, below are two releases (from
some past version of the page linked above):
We schedule work for **major releases** through `milestones
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
<https://github.com/spack/spack/labels>`_.

.. image:: images/projects.png
There is only one milestone open at a time. Its name corresponds to the next major version, for
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
core developers, so that they are not forgotten at the time of release. The milestone is closed
when the release is made, and a new milestone is created for the next major release.

This image shows one release in progress for ``0.15.1`` and another for
``0.16.0``. Each of these releases has a project board containing issues
and pull requests. GitHub shows a status bar with completed work in
green, work in progress in purple, and work not started yet in gray, so
it's fairly easy to see progress.
Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
Important issues should be assigned to the next milestone as well, so they appear at the top of
the project board.

Spack's project boards are not firm commitments so we move work between
releases frequently. If we need to make a release and some tasks are not
yet done, we will simply move them to the next minor or major release, rather
than delaying the release to complete them.
Spack's milestones are not firm commitments so we move work between releases frequently. If we
need to make a release and some tasks are not yet done, we will simply move them to the next major
release milestone, rather than delaying the release to complete them.

For more on using GitHub project boards, see `GitHub's documentation
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
^^^^^^^^^^^^^^^^^^^^^
Backporting bug fixes
^^^^^^^^^^^^^^^^^^^^^

When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
that the bug fix should be backported to.

Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
Typically there are one or two backport pull requests open at any given time.
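
As a concrete sketch of that cherry-picking step (the version and SHA below are placeholders):

.. code-block:: console

   $ git checkout backports/v0.22.3
   $ git cherry-pick <sha-of-squashed-commit-on-develop>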

.. _major-releases:
@@ -806,25 +819,21 @@ For more on using GitHub project boards, see `GitHub's documentation
Making major releases
^^^^^^^^^^^^^^^^^^^^^

Assuming a project board has already been created and all required work
completed, the steps to make the major release are:
Assuming all required work from the milestone is completed, the steps to make the major release
are:

#. Create two new project boards:
#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
   release.

   * One for the next major release
   * One for the next point release

#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.

#. Move any optional tasks that are not done to one of the new project boards.

   In general, small bugfixes should go to the next point release. Major
   features, refactors, and changes that could affect concretization should
   go in the next major release.

#. Move any optional tasks that are not done to the next milestone.

#. Create a branch for the release, based on ``develop``:

   .. code-block:: console

      $ git checkout -b releases/v0.15 develop
      $ git checkout -b releases/v0.23 develop

   For a version ``vX.Y.Z``, the branch's name should be
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``

@@ -860,8 +869,8 @@ completed, the steps to make the major release are:

   Create a pull request targeting the ``develop`` branch, bumping the major
   version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
   For instance when you have just released ``v0.15.0``, set the version
   to ``(0, 16, 0, 'dev0')`` on ``develop``.
   For instance when you have just released ``v0.23.0``, set the version
   to ``(0, 24, 0, 'dev0')`` on ``develop``.

#. Follow the steps in :ref:`publishing-releases`.
@@ -870,82 +879,52 @@ completed, the steps to make the major release are:
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
|
||||
.. _point-releases:
|
||||
.. _patch-releases:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Making point releases
|
||||
Making patch releases
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Assuming a project board has already been created and all required work
|
||||
completed, the steps to make the point release are:
|
||||
To make the patch release process both efficient and transparent, we use a *backports pull request*
|
||||
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
|
||||
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
|
||||
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
|
||||
changes are fresh in the mind of the developer.
|
||||
|
||||
#. Create a new project board for the next point release.
|
||||
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
|
||||
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
|
||||
|
||||
#. Move any optional tasks that are not done to the next project board.
|
||||
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
|
||||
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
|
||||
squashed), cherry-pick each associated commit individually. Never force push to the
|
||||
``backports/vX.Y.Z`` branch.
|
||||
|
||||
-#. Check out the release branch (it should already exist).
+.. warning::

-   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
-   For ``v0.15.1``, you would check out ``releases/v0.15``:
+   Sometimes you may **still** get merge conflicts even if you have
+   cherry-picked all the commits in order. This generally means there
+   is some other intervening pull request that the one you're trying
+   to pick depends on. In these cases, you'll need to make a judgment
+   call regarding those pull requests. Consider the number of affected
+   files and/or the resulting differences.

-   .. code-block:: console
+   1. If the changes are small, you might just cherry-pick it.

-      $ git checkout releases/v0.15
+   2. If the changes are large, then you may decide that this fix is not
+      worth including in a patch release, in which case you should remove
+      the label from the pull request. Remember that large, manual backports
+      are seldom the right choice for a patch release.

-#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
-   in the project, create it. This pull request ought to be created as early as
-   possible when working on a release project, so that we can build the release
-   commits incrementally, and identify potential conflicts at an early stage.
+When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
+release as follows:

-#. Cherry-pick each pull request in the ``Done`` column of the release
-   project board onto the ``Backports vX.Y.Z`` pull request.
+#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
+   release.

-   This is **usually** fairly simple since we squash the commits from the
-   vast majority of pull requests. That means there is only one commit
-   per pull request to cherry-pick. For example, `this pull request
-   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
-   they were squashed into a single commit on merge. You can see the
-   commit that was created here:
+#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
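If there are many open items to relabel, the GitHub CLI can do the swap in bulk.
A hypothetical convenience sketch, covering pull requests only (issues need the
same treatment, and the web UI works just as well):

.. code-block:: console

   $ gh pr list --label v0.23.1 --state open --json number --jq '.[].number' \
       | xargs -I{} gh pr edit {} --remove-label v0.23.1 --add-label v0.23.2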
-   .. image:: images/pr-commit.png
-
-   You can easily cherry pick it like this (assuming you already have the
-   release branch checked out):
-
-   .. code-block:: console
-
-      $ git cherry-pick 7e46da7
-
-   For pull requests that were rebased (or not squashed), you'll need to
-   cherry-pick each associated commit individually.
-
-   .. warning::
-
-      It is important to cherry-pick commits in the order they happened,
-      otherwise you can get conflicts while cherry-picking. When
-      cherry-picking look at the merge date,
-      **not** the number of the pull request or the date it was opened.
-
-      Sometimes you may **still** get merge conflicts even if you have
-      cherry-picked all the commits in order. This generally means there
-      is some other intervening pull request that the one you're trying
-      to pick depends on. In these cases, you'll need to make a judgment
-      call regarding those pull requests. Consider the number of affected
-      files and or the resulting differences.
-
-      1. If the dependency changes are small, you might just cherry-pick it,
-         too. If you do this, add the task to the release board.
-
-      2. If the changes are large, then you may decide that this fix is not
-         worth including in a point release, in which case you should remove
-         the task from the release project.
-
-      3. You can always decide to manually back-port the fix to the release
-         branch if neither of the above options makes sense, but this can
-         require a lot of work. It's seldom the right choice.
-
-#. When all the commits from the project board are cherry-picked into
-   the ``Backports vX.Y.Z`` pull request, you can push a commit to:
+#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
+   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

    1. Bump the version in ``lib/spack/spack/__init__.py``.
    2. Update ``CHANGELOG.md`` with a list of the changes.

@@ -954,20 +933,22 @@ completed, the steps to make the point release are:

    release branch. See `the changelog from 0.14.1
    <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
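A sketch of that final commit (version ``v0.23.1`` is a placeholder):

.. code-block:: console

   $ git checkout backports/v0.23.1
   $ $EDITOR lib/spack/spack/__init__.py CHANGELOG.md
   $ git commit -am "Set version to v0.23.1"
   $ git push origin backports/v0.23.1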
-#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
-   is needed to keep track in the release branch of all the commits that were
-   cherry-picked.
-
-#. Make sure CI passes on the release branch, including:
+#. Make sure CI passes on the **backports pull request**, including:

    * Regular unit tests
    * Build tests
    * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

-   If CI does not pass, you'll need to figure out why, and make changes
-   to the release branch until it does. You can make more commits, modify
-   or remove cherry-picked commits, or cherry-pick **more** from
-   ``develop`` to make this happen.
+#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+   is needed to keep track in the release branch of all the commits that were
+   cherry-picked.
+
+#. Make sure CI passes on the last commit of the **release branch**.
+
+#. In the rare case you need to include additional commits in the patch release after the backports
+   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
+   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
+   repeat the process. Avoid repeated force pushes to the release branch.
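A sketch of that recovery (branch names are hypothetical placeholders):

.. code-block:: console

   $ git checkout releases/v0.23
   $ git reset --hard HEAD~1                 # drop the "Set version to vX.Y.Z" commit
   $ git push --force origin releases/v0.23  # the single force push
   $ git checkout -b backports/v0.23.1-take2 # base branch for "Backports vX.Y.Z (2)"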
 #. Follow the steps in :ref:`publishing-releases`.

@@ -1042,25 +1023,31 @@ Updating `releases/latest`

 If the new release is the **highest** Spack release yet, you should
 also tag it as ``releases/latest``. For example, suppose the highest
-release is currently ``0.15.3``:
+release is currently ``0.22.3``:

-* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
-  it with ``releases/latest``, as these are higher than ``0.15.3``.
+* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
+  it with ``releases/latest``, as these are higher than ``0.22.3``.

 * If you are making a new release of an **older** major version of
-  Spack, e.g. ``0.14.4``, then you should not tag it as
+  Spack, e.g. ``0.21.4``, then you should not tag it as
   ``releases/latest`` (as there are newer major versions).

-To tag ``releases/latest``, do this:
+To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

 .. code-block:: console

-   $ git checkout releases/vX.Y   # vX.Y is the new release's branch
-   $ git tag --force releases/latest
-   $ git push --force --tags
+   $ git fetch --force git@github.com:spack/spack vX.Y.Z

-The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
-``releases/latest`` tag with the new one.
+Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
+
+.. code-block:: console
+
+   $ git tag --force releases/latest vX.Y.Z
+   $ git push --force git@github.com:spack/spack releases/latest
+
+The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
+tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
+local tags.
 .. _announcing-releases:

@@ -1071,9 +1058,9 @@ Announcing a release

 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:

-#. Announce the release on Twitter.
+#. Announce the release on X.

    * Compose the tweet on the ``@spackpm`` account per the
      ``spack-twitter`` slack channel.
@@ -142,12 +142,8 @@ user's prompt to begin with the environment name in brackets.

    $ spack env activate -p myenv
    [myenv] $ ...

-The ``activate`` command can also be used to create a new environment, if it is
-not already defined, by adding the ``--create`` flag. Managed and anonymous
-environments, anonymous environments are explained in the next section,
-can both be created using the same flags that `spack env create` accepts.
-If an environment already exists then spack will simply activate it and ignore the
-create specific flags.
+The ``activate`` command can also be used to create a new environment if it does not already
+exist.

 .. code-block:: console

@@ -176,21 +172,36 @@ environment will remove the view from the user environment.

 Anonymous Environments
 ^^^^^^^^^^^^^^^^^^^^^^

-Any directory can be treated as an environment if it contains a file
-``spack.yaml``. To load an anonymous environment, use:
+Apart from managed environments, Spack also supports anonymous environments.
+
+Anonymous environments can be placed in any directory of choice.
+
+.. note::
+
+   When uninstalling packages, Spack asks the user to confirm the removal of packages
+   that are still used in a managed environment. This is not the case for anonymous
+   environments.
+
+To create an anonymous environment, use one of the following commands:

 .. code-block:: console

-   $ spack env activate -d /path/to/directory
+   $ spack env create --dir my_env
+   $ spack env create ./my_env

-Anonymous specs can be created in place using the command:
+As a shorthand, you can also create an anonymous environment upon activation if it does not
+already exist:

 .. code-block:: console

-   $ spack env create -d .
+   $ spack env activate --create ./my_env

+For convenience, Spack can also place an anonymous environment in a temporary directory for you:
+
+.. code-block:: console
+
+   $ spack env activate --temp

-In this case Spack simply creates a ``spack.yaml`` file in the requested
-directory.
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Environment Sensitive Commands

@@ -449,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the

 user; and can be included in ``.bashrc`` files, etc. The ``loads``
 file may also be copied out of the environment, renamed, etc.

+.. _environment_include_concrete:
+
+------------------------------
+Included Concrete Environments
+------------------------------
+
+Spack environments can create an environment based off of information in already
+established environments. You can think of it as a combination of existing
+environments. It will gather information from the existing environment's
+``spack.lock`` and use that during the creation of this included concrete
+environment. When an included concrete environment is created it will generate
+a ``spack.lock`` file for the newly created environment.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Creating included environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To create a combined concrete environment, you must have at least one existing
+concrete environment. You will use the command ``spack env create`` with the
+argument ``--include-concrete`` followed by the name or path of the environment
+you'd like to include. Here is an example of how to create a combined environment
+from the command line:
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+You can also include an environment directly in the ``spack.yaml`` file. It
+involves adding the ``include_concrete`` heading in the yaml followed by the
+absolute path to the independent environments:
+
+.. code-block:: yaml
+
+   spack:
+     specs: []
+     concretizer:
+       unify: true
+     include_concrete:
+     - /absolute/path/to/environment1
+     - /absolute/path/to/environment2
+
+Once the ``spack.yaml`` has been updated, you must concretize the environment to
+get the concrete specs from the included environments.
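Concretely, with ``included_env`` as created above:

.. code-block:: console

   $ spack -e included_env concretize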
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Updating an included environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If changes were made to the base environment and you want that reflected in the
+included environment, you will need to reconcretize both the base environment and the
+included environment for the change to take effect. For example:
+
+.. code-block:: console
+
+   $ spack env create myenv
+   $ spack -e myenv add python
+   $ spack -e myenv concretize
+   $ spack env create --include-concrete myenv included_env
+
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+Here we see that ``included_env`` has access to the python package through
+the ``myenv`` environment. But if we were to add another spec to ``myenv``,
+``included_env`` will not be able to access the new information.
+
+.. code-block:: console
+
+   $ spack -e myenv add perl
+   $ spack -e myenv concretize
+   $ spack -e myenv find
+   ==> In environment myenv
+   ==> Root specs
+   perl python
+
+   ==> 0 installed packages
+
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   python
+
+   ==> 0 installed packages
+
+It isn't until you run the ``spack concretize`` command that the combined
+environment will get the updated information from the reconcretized base environment.
+
+.. code-block:: console
+
+   $ spack -e included_env concretize
+   $ spack -e included_env find
+   ==> In environment included_env
+   ==> No root specs
+   ==> Included specs
+   perl python
+
+   ==> 0 installed packages

 .. _environment-configuration:
 ------------------------

@@ -800,6 +930,7 @@ For example, the following environment has three root packages:

 This allows for a much-needed reduction in redundancy between packages
 and constraints.

+
 ----------------
 Filesystem Views
 ----------------
@@ -1033,7 +1164,7 @@ other targets to depend on the environment installation.

 A typical workflow is as follows:

-.. code:: console
+.. code-block:: console

    spack env create -d .
    spack -e . add perl

@@ -1126,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to

 dependencies. Below we show a use case where a spec is installed with verbose
 output (``spack install --verbose``) while its dependencies are installed silently:

-.. code:: console
+.. code-block:: console

    $ spack env depfile -o Makefile

@@ -1148,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``

 variable. Assuming we have an active and concrete environment, we generate the
 associated ``Makefile`` with a prefix ``example``:

-.. code:: console
+.. code-block:: console

    $ spack env depfile -o env.mk --make-prefix example
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown below

    .. code-block:: console

       apt update
-      apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
+      apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

 .. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown below

       dnf install epel-release
       dnf group install "Development Tools"
-      dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
+      dnf install gcc-gfortran redhat-lsb-core python3 unzip

 .. tab-item:: macOS Brew

    .. code-block:: console

       brew update
-      brew install curl gcc git gnupg zip
+      brew install gcc git zip

 ------------
 Installation
@@ -478,6 +478,13 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,

 all other fields from the compilers config can be added to the
 ``extra_attributes`` field for an external representing a compiler.

+Note that the format for the ``paths`` field in the
+``extra_attributes`` section is different than in the ``compilers``
+config. For compilers configured as external packages, the section is
+named ``compilers`` and the dictionary maps language names (``c``,
+``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
+``fc``, and ``f77``.
+
 .. code-block:: yaml

    packages:

@@ -493,11 +500,10 @@ all other fields from the compilers config can be added to the

       - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
         prefix: /usr
         extra_attributes:
-          paths:
-            cc: /usr/bin/clang-with-suffix
+          compilers:
+            c: /usr/bin/clang-with-suffix
             cxx: /usr/bin/clang++-with-extra-info
-            fc: /usr/bin/gfortran
-            f77: /usr/bin/gfortran
+            fortran: /usr/bin/gfortran
           extra_rpaths:
           - /usr/lib/llvm/
@@ -1572,6 +1578,8 @@ Microsoft Visual Studio
 """""""""""""""""""""""

 Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
+Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
+Visual Studio installation, as it is required to build many packages from source.

 We require several specific components to be included in the Visual Studio installation.
 One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"

@@ -1579,6 +1587,7 @@ depending on installation type (Professional, Build Tools, etc.) The other required

 "C++ CMake tools for Windows," which can be selected from among the optional packages.
 This provides CMake and Ninja for use during Spack configuration.

+
 If you already have Visual Studio installed, you can make sure these components are installed by
 rerunning the installer. Next to your installation, select "Modify" and look at the
 "Installation details" pane on the right.
(Two binary image files removed from the docs; not shown. Before: 44 KiB and 68 KiB.)
@@ -12,10 +12,6 @@
 Spack
 ===================

-.. epigraph::
-
-   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
-
 Spack is a package management tool designed to support multiple
 versions and configurations of software on a wide variety of platforms
 and environments. It was designed for large supercomputing centers,
@@ -893,26 +893,50 @@ as an option to the ``version()`` directive. Example situations would be a

 "snapshot"-like Version Control System (VCS) tag, a VCS branch such as
 ``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.

+.. _version-comparison:
+
 ^^^^^^^^^^^^^^^^^^
 Version comparison
 ^^^^^^^^^^^^^^^^^^

 Spack imposes a generic total ordering on the set of versions,
 independently from the package they are associated with.

 Most Spack versions are numeric, a tuple of integers; for example,
-``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
-numeric versions.
+``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
+comparison is lexicographical on the numeric components:
+``1.2 < 1.2.1 < 1.2.2 < 1.10``.

-Some Spack versions involve slight extensions of numeric syntax; for
-example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
-always considered to be "newer" than letters. This is for consistency
-with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
+Spack also supports string components such as ``1.1.1a`` and
+``1.y.0``. String components are considered less than numeric
+components, so ``1.y.0 < 1.0``. This is for consistency with
+`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
+components do not have to be separated by dots or any other delimiter.
+So, the contrived version ``1y0`` is identical to ``1.y.0``.

-Spack versions may also be arbitrary non-numeric strings, for example
-``develop``, ``master``, ``local``.
+Pre-release suffixes also contain string parts, but they are handled
+in a special way. For example, ``1.2.3alpha1`` is parsed as a pre-release
+of the version ``1.2.3``. This allows Spack to order it before the
+actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
+release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
+suffix not recognized as a pre-release is treated as an ordinary
+string component, so ``1.2 < 1.2-mysuffix``.

-The order on versions is defined as follows. A version string is split
-into a list of components based on delimiters such as ``.``, ``-`` etc.
-Lists are then ordered lexicographically, where components are ordered
-as follows:
+Finally, there are a few special string components that are considered
+"infinity versions". They include ``develop``, ``main``, ``master``,
+``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
+These are useful for specifying the most recent development version of
+a package (often a moving target like a git branch), without assigning
+a specific version number. Infinity versions are not automatically used
+when determining the latest version of a package unless explicitly
+required by another package or user.
+
+More formally, the order on versions is defined as follows. A version
+string is split into a list of components based on delimiters such as
+``.`` and ``-`` and string boundaries. The components are split into
+the **release** and a possible **pre-release** (if the last component
+is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
+The release components are ordered lexicographically, with comparison
+between different types of components as follows:

 #. The following special strings are considered larger than any other
    numeric or non-numeric version component, and satisfy the following

@@ -925,6 +949,9 @@ as follows:

 #. All other non-numeric components are less than numeric components,
    and are ordered alphabetically.

+Finally, if the release components are equal, the pre-release components
+are used to break the tie, in the obvious way.
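These rules are easy to check interactively. A minimal sketch using ``spack python``
(module paths assume a recent Spack checkout):

.. code-block:: console

   $ spack python
   >>> from spack.version import Version
   >>> Version("1.2") < Version("1.2.1") < Version("1.10")   # numeric, lexicographic
   True
   >>> Version("1.y.0") < Version("1.0")                     # strings sort below numbers
   True
   >>> Version("1.2.3alpha1") < Version("1.2.3")             # pre-releases come first
   True
   >>> Version("1.2") < Version("develop")                   # infinity versions
   True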

 The logic behind this sort order is two-fold:

 #. Non-numeric versions are usually used for special cases while
@@ -2415,15 +2442,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:

    depends_on("libelf@0.8")

-Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
-You can also specify a requirement for a particular variant or for
-specific compiler flags:
+Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
+includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
+restrictions, you can also specify variants if this package requires
+optional features of the dependency.

 .. code-block:: python

-   depends_on("libelf@0.8+debug")
-   depends_on("libelf debug=True")
-   depends_on("libelf cppflags='-fPIC'")
+   depends_on("libelf@0.8 +parser +pic")

 Both users *and* package authors can use the same spec syntax to refer
 to different package configurations. Users use the spec syntax on the
@@ -2431,46 +2457,82 @@ command line to find installed packages or to install packages with

 particular constraints, and package authors can use specs to describe
 relationships between packages.

-^^^^^^^^^^^^^^
-Version ranges
-^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Specifying backward and forward compatibility
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Although some packages require a specific version for their dependencies,
-most can be built with a range of versions. For example, if you are
-writing a package for a legacy Python module that only works with Python
-2.4 through 2.6, this would look like:
+Packages are often compatible with a range of versions of their
+dependencies. This is typically referred to as backward and forward
+compatibility. Spack allows you to specify this in the ``depends_on``
+directive using version ranges.
+
+**Backwards compatibility** means that the package requires at least a
+certain version of its dependency:

 .. code-block:: python

-   depends_on("python@2.4:2.6")
+   depends_on("python@3.10:")

-Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
-greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
-you want to specify that a package works with any version of Python 3 (or
-higher), this would look like:
+In this case, the package requires Python 3.10 or newer.
+
+Commonly, packages drop support for older versions of a dependency as
+they release new versions. In Spack you can conveniently add every
+backward compatibility rule as a separate line:

 .. code-block:: python

-   depends_on("python@3:")
+   # backward compatibility with Python
+   depends_on("python@3.8:")
+   depends_on("python@3.9:", when="@1.2:")
+   depends_on("python@3.10:", when="@1.4:")

-Here we leave out the upper bound. If you want to say that a package
-requires Python 2, you can similarly leave out the lower bound:
+This means that in general we need Python 3.8 or newer; from version
+1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
+need Python 3.10 or newer. Notice that it's fine to have overlapping
+ranges in the ``when`` clauses.
+
+**Forward compatibility** means that the package requires at most a
+certain version of its dependency. Forward compatibility rules are
+necessary when there are breaking changes in the dependency that the
+package cannot handle. In Spack we often add forward compatibility
+bounds only at the time a new, breaking version of a dependency is
+released. As with backward compatibility, it is typical to see a list
+of forward compatibility bounds in a package file as separate lines:

 .. code-block:: python

-   depends_on("python@:2")
+   # forward compatibility with Python
+   depends_on("python@:3.12", when="@:1.10")
+   depends_on("python@:3.13", when="@:1.12")

-Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
-``@:3`` means "up to and including any 3.x version".
+Notice how the ``:`` now appears before the version number both in the
+dependency and in the ``when`` clause. This tells Spack that in general
+we need Python 3.13 or older up to version ``1.12.x``, and up to version
+``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
+with Python 3.13 was added in version 1.11, while version 1.13 added forward
+compatibility with Python 3.14.

-You can also simply write
+Notice that a version range ``@:3.12`` includes *any* patch version
+number ``3.12.x``, which is often useful when specifying forward compatibility
+bounds.
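The inclusive upper bound is easy to verify with the spec syntax itself; a quick
sketch using ``spack python``:

.. code-block:: console

   $ spack python -c 'from spack.spec import Spec; print(Spec("python@3.12.4").satisfies("@:3.12"))'
   True
   $ spack python -c 'from spack.spec import Spec; print(Spec("python@3.13.0").satisfies("@:3.12"))'
   False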
+So far we have seen open-ended version ranges, which is by far the most
+common use case. It is also possible to specify both a lower and an upper bound
+on the version of a dependency, like this:

 .. code-block:: python

-   depends_on("python@2.7")
+   depends_on("python@3.10:3.12")

-to tell Spack that the package needs Python 2.7.x. This is equivalent to
-``@2.7:2.7``.
+There is short syntax to specify that a package is compatible with, say, any
+``3.x`` version:
+
+.. code-block:: python
+
+   depends_on("python@3")
+
+The above is equivalent to ``depends_on("python@3:3")``, which means at least
+Python version 3 and at most any version ``3.x.y``.
+
+In very rare cases, you may need to specify an exact version, for example
+if you need to distinguish between ``3.2`` and ``3.2.1``:
@@ -6408,9 +6470,12 @@ the ``paths`` attribute:

          echo "Target: x86_64-pc-linux-gnu"
          echo "Thread model: posix"
          echo "InstalledDir: /usr/bin"
+     platforms: ["linux", "darwin"]
      results:
      - spec: 'llvm@3.9.1 +clang~lld~lldb'

+If the ``platforms`` attribute is present, tests are run only if the current host
+matches one of the listed platforms.
+
 Each test is performed by first creating a temporary directory structure as
 specified in the corresponding ``layout`` and by then running
 package detection and checking that the outcome matches the expected

@@ -6444,6 +6509,10 @@ package detection and checking that the outcome matches the expected

      - A spec that is expected from detection
      - Any valid spec
      - Yes
+   * - ``results:[0]:extra_attributes``
+     - Extra attributes expected on the associated Spec
+     - Nested dictionary with string as keys, and regular expressions as leaf values
+     - No

 """""""""""""""""""""""""""""""
 Reuse tests from other packages
@@ -2,12 +2,12 @@ sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.0
+python-levenshtein==0.25.1
 docutils==0.20.1
 pygments==2.17.2
 urllib3==2.2.1
-pytest==8.1.1
+pytest==8.2.0
 isort==5.13.2
-black==24.3.0
+black==24.4.2
 flake8==7.0.0
-mypy==1.9.0
+mypy==1.10.0
lib/spack/env/cc (vendored): 255 lines changed
@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
 SPACK_FC_RPATH_ARG
 SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
-SPACK_SYSTEM_DIRS"
+SPACK_SYSTEM_DIRS
+SPACK_MANAGED_DIRS"

 # Optional parameters that aren't required to be set

@@ -173,22 +174,6 @@ preextend() {
     unset IFS
 }

-# system_dir PATH
-# test whether a path is a system directory
-system_dir() {
-    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
-    path="$1"
-    for sd in $SPACK_SYSTEM_DIRS; do
-        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
-            # success if path starts with a system prefix
-            unset IFS
-            return 0
-        fi
-    done
-    unset IFS
-    return 1  # fail if path starts no system prefix
-}
-
 # Fail with a clear message if the input contains any bell characters.
 if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
     die "Compiler command line contains our separator ('${lsep}'). Cannot parse."

@@ -201,6 +186,18 @@ for param in $params; do
     fi
 done

+# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
+# moving the eval inside the function would eval it every call.
+eval "\
+path_order() {
+case \"\$1\" in
+    $SPACK_MANAGED_DIRS) return 0 ;;
+    $SPACK_SYSTEM_DIRS) return 2 ;;
+    /*) return 1 ;;
+esac
+}
+"
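To make the ``path_order`` trick above concrete: the two environment variables are
expected to already contain shell ``case`` patterns, and expanding them via ``eval``
once at startup (as the comment in the hunk notes) avoids re-evaluating them on every
call. The function classifies a path through its return status, and callers dispatch
on ``$?``. A standalone sketch with hypothetical glob patterns:

.. code-block:: sh

   path_order() {
       case "$1" in
           /opt/spack/opt/*) return 0 ;;  # spack store: highest priority
           /usr/*|/lib*)     return 2 ;;  # system dirs: lowest priority
           /*)               return 1 ;;  # any other absolute path
       esac
   }

   path_order "/usr/lib"
   echo "class $?"   # prints: class 2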

 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then

@@ -248,7 +245,7 @@ case "$command" in
         lang_flags=C
         debug_flags="-g"
         ;;
-    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
+    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
         command="$SPACK_CXX"
         language="C++"
         comp="CXX"

@@ -420,11 +417,12 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            if system_dir "$1"; then
-                append return_system_rpath_dirs_list "$1"
-            else
-                append return_rpath_dirs_list "$1"
-            fi
+            path_order "$1"
+            case $? in
+                0) append return_spack_store_rpath_dirs_list "$1" ;;
+                1) append return_rpath_dirs_list "$1" ;;
+                2) append return_system_rpath_dirs_list "$1" ;;
+            esac
             wl_expect_rpath=no
         else
             case "$1" in

@@ -432,21 +430,25 @@ parse_Wl() {
                     arg="${1#-rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 --rpath=*)
                     arg="${1#--rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
-                    elif system_dir "$arg"; then
-                        append return_system_rpath_dirs_list "$arg"
-                    else
-                        append return_rpath_dirs_list "$arg"
                     fi
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -473,12 +475,20 @@ categorize_arguments() {

     return_other_args_list=""
     return_isystem_was_used=""

+    return_isystem_spack_store_include_dirs_list=""
     return_isystem_system_include_dirs_list=""
     return_isystem_include_dirs_list=""
+
+    return_spack_store_include_dirs_list=""
     return_system_include_dirs_list=""
     return_include_dirs_list=""
+
+    return_spack_store_lib_dirs_list=""
     return_system_lib_dirs_list=""
     return_lib_dirs_list=""
+
+    return_spack_store_rpath_dirs_list=""
     return_system_rpath_dirs_list=""
     return_rpath_dirs_list=""

@@ -526,7 +536,7 @@ categorize_arguments() {

             continue
         fi

-        replaced="$after$stripped"
+        replaced="$after$stripped"

         # it matched, remove it
         shift

@@ -546,29 +556,32 @@ categorize_arguments() {
                 arg="${1#-isystem}"
                 return_isystem_was_used=true
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_isystem_system_include_dirs_list "$arg"
-                else
-                    append return_isystem_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_isystem_include_dirs_list "$arg" ;;
+                    2) append return_isystem_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -I*)
                 arg="${1#-I}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_include_dirs_list "$arg"
-                else
-                    append return_include_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_include_dirs_list "$arg" ;;
+                    2) append return_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -L*)
                 arg="${1#-L}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_lib_dirs_list "$arg"
-                else
-                    append return_lib_dirs_list "$arg"
-                fi
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_lib_dirs_list "$arg" ;;
+                    1) append return_lib_dirs_list "$arg" ;;
+                    2) append return_system_lib_dirs_list "$arg" ;;
+                esac
                 ;;
             -l*)
                 # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -601,29 +614,32 @@ categorize_arguments() {

                 break
             elif [ "$xlinker_expect_rpath" = yes ]; then
                 # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                if system_dir "$1"; then
-                    append return_system_rpath_dirs_list "$1"
-                else
-                    append return_rpath_dirs_list "$1"
-                fi
+                path_order "$1"
+                case $? in
+                    0) append return_spack_store_rpath_dirs_list "$1" ;;
+                    1) append return_rpath_dirs_list "$1" ;;
+                    2) append return_system_rpath_dirs_list "$1" ;;
+                esac
                 xlinker_expect_rpath=no
             else
                 case "$1" in
                     -rpath=*)
                         arg="${1#-rpath=}"
-                        if system_dir "$arg"; then
-                            append return_system_rpath_dirs_list "$arg"
-                        else
-                            append return_rpath_dirs_list "$arg"
-                        fi
+                        path_order "$arg"
+                        case $? in
+                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                            1) append return_rpath_dirs_list "$arg" ;;
+                            2) append return_system_rpath_dirs_list "$arg" ;;
+                        esac
                         ;;
                     --rpath=*)
                         arg="${1#--rpath=}"
-                        if system_dir "$arg"; then
-                            append return_system_rpath_dirs_list "$arg"
-                        else
-                            append return_rpath_dirs_list "$arg"
-                        fi
+                        path_order "$arg"
+                        case $? in
+                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                            1) append return_rpath_dirs_list "$arg" ;;
+                            2) append return_system_rpath_dirs_list "$arg" ;;
+                        esac
                         ;;
                     -rpath|--rpath)
                         xlinker_expect_rpath=yes

@@ -661,16 +677,25 @@ categorize_arguments() {
 }

 categorize_arguments "$@"

-include_dirs_list="$return_include_dirs_list"
-lib_dirs_list="$return_lib_dirs_list"
-rpath_dirs_list="$return_rpath_dirs_list"
-system_include_dirs_list="$return_system_include_dirs_list"
-system_lib_dirs_list="$return_system_lib_dirs_list"
-system_rpath_dirs_list="$return_system_rpath_dirs_list"
-isystem_was_used="$return_isystem_was_used"
-isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-isystem_include_dirs_list="$return_isystem_include_dirs_list"
-other_args_list="$return_other_args_list"
+
+spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+system_include_dirs_list="$return_system_include_dirs_list"
+include_dirs_list="$return_include_dirs_list"
+
+spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+system_lib_dirs_list="$return_system_lib_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+
+spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+system_rpath_dirs_list="$return_system_rpath_dirs_list"
+rpath_dirs_list="$return_rpath_dirs_list"
+
+isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+isystem_was_used="$return_isystem_was_used"
+other_args_list="$return_other_args_list"

 #
 # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -730,7 +755,7 @@ esac

 # Linker flags
 case "$mode" in
-    ld|ccld)
+    ccld)
         extend spack_flags_list SPACK_LDFLAGS
         ;;
 esac

@@ -738,16 +763,25 @@ esac

 IFS="$lsep"
     categorize_arguments $spack_flags_list
 unset IFS

-spack_flags_include_dirs_list="$return_include_dirs_list"
-spack_flags_lib_dirs_list="$return_lib_dirs_list"
-spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
-spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
-spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
-spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
-spack_flags_isystem_was_used="$return_isystem_was_used"
-spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
-spack_flags_other_args_list="$return_other_args_list"
+
+spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_include_dirs_list="$return_include_dirs_list"
+
+spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+
+spack_flags_isystem_was_used="$return_isystem_was_used"
+spack_flags_other_args_list="$return_other_args_list"

 # On macOS insert headerpad_max_install_names linker flag

@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then

         # Append RPATH directories. Note that in the case of the
         # top-level package these directories may not exist yet. For dependencies
         # it is assumed that paths have already been confirmed.
+        extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
         extend rpath_dirs_list SPACK_RPATH_DIRS
     fi
 fi

 if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
+    extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
     extend lib_dirs_list SPACK_LINK_DIRS
 fi

@@ -798,38 +834,50 @@ case "$mode" in

         ;;
 esac

+case "$mode" in
+    cpp|cc|as|ccld)
+        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
+            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
+        else
+            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend include_dirs_list SPACK_INCLUDE_DIRS
+        fi
+        ;;
+esac

 #
 # Finally, reassemble the command line.
 #
 args_list="$flags_list"

-# Insert include directories just prior to any system include directories
+# Include search paths partitioned by (in store, non-system, system)
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_flags_include_dirs_list "-I"
-extend args_list include_dirs_list "-I"
+extend args_list spack_flags_spack_store_include_dirs_list -I
+extend args_list spack_store_include_dirs_list -I
+
+extend args_list spack_flags_include_dirs_list -I
+extend args_list include_dirs_list -I
+
+extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
+
+extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_include_dirs_list "-isystem${lsep}"

-case "$mode" in
-    cpp|cc|as|ccld)
-        if [ "$spack_flags_isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        elif [ "$isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        else
-            extend args_list SPACK_INCLUDE_DIRS "-I"
-        fi
-        ;;
-esac

 extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I

 extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

-# Library search paths
+# Library search paths partitioned by (in store, non-system, system)
+extend args_list spack_flags_spack_store_lib_dirs_list "-L"
+extend args_list spack_store_lib_dirs_list "-L"
+
 extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"

 extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"

@@ -839,8 +887,12 @@ case "$mode" in

         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+        extend args_list spack_store_rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_rpath_dirs_list "$rpath"
         extend args_list rpath_dirs_list "$rpath"
+
         extend args_list spack_flags_system_rpath_dirs_list "$rpath"
         extend args_list system_rpath_dirs_list "$rpath"
         ;;

@@ -848,8 +900,12 @@ case "$mode" in

         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
+        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
         extend args_list rpath_dirs_list "-rpath${lsep}"
+
         extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
         extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;

@@ -913,4 +969,3 @@ fi

 # Execute the full command, preserving spaces with IFS set
 # to the alarm bell separator.
 IFS="$lsep"; exec $full_command_list
lib/spack/external/__init__.py (vendored): 2 lines changed
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
+* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)

 astunparse
 ----------------
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):

                  Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
 # fmt: on
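The fix in this hunk (from a vendored module; judging by ``merge_attrib``, the
ruamel.yaml comments code) is subtle: the old line passed ``memo`` as the *default*
argument of ``getattr`` instead of as the memo dictionary of ``copy.deepcopy``. A
minimal sketch showing that the memo only participates when passed to ``deepcopy``:

.. code-block:: console

   $ python3 -c 'import copy; memo = {}; x = [1, [2]]; copy.deepcopy(x, memo); print(len(memo) > 0)'
   True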
lib/spack/external/archspec/__init__.py (vendored): 2 lines changed

@@ -1,3 +1,3 @@
 """Init file to avoid namespace packages"""

-__version__ = "0.2.3"
+__version__ = "0.2.4"
lib/spack/external/archspec/cpu/__init__.py (vendored): 9 lines changed

@@ -5,9 +5,10 @@
 """The "cpu" package permits to query and compare different
 CPU microarchitectures.
 """
-from .detect import host
+from .detect import brand_string, host
 from .microarchitecture import (
     TARGETS,
     InvalidCompilerVersion,
     Microarchitecture,
     UnsupportedMicroarchitecture,
     generic_microarchitecture,

@@ -15,10 +16,12 @@
 )

 __all__ = [
+    "brand_string",
-    "host",
-    "TARGETS",
     "InvalidCompilerVersion",
     "Microarchitecture",
     "UnsupportedMicroarchitecture",
+    "TARGETS",
     "generic_microarchitecture",
+    "host",
     "version_components",
 ]
lib/spack/external/archspec/cpu/detect.py (vendored): 54 lines changed

@@ -47,7 +47,11 @@ def decorator(factory):

 def partial_uarch(
-    name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
+    name: str = "",
+    vendor: str = "",
+    features: Optional[Set[str]] = None,
+    generation: int = 0,
+    cpu_part: str = "",
 ) -> Microarchitecture:
     """Construct a partial microarchitecture, from information gathered during system scan."""
     return Microarchitecture(

@@ -57,6 +61,7 @@ def partial_uarch(
         features=features or set(),
         compilers={},
         generation=generation,
+        cpu_part=cpu_part,
     )

@@ -90,6 +95,7 @@ def proc_cpuinfo() -> Microarchitecture:
         return partial_uarch(
             vendor=_canonicalize_aarch64_vendor(data),
             features=_feature_set(data, key="Features"),
+            cpu_part=data.get("CPU part", ""),
         )

     if architecture in (PPC64LE, PPC64):

@@ -155,6 +161,31 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
         mask = 1 << bit
         return register & mask > 0

+    def brand_string(self) -> Optional[str]:
+        """Returns the brand string, if available."""
+        if self.highest_extension_support < 0x80000004:
+            return None
+
+        r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
+        r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
+        r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
+        result = struct.pack(
+            "IIIIIIIIIIII",
+            r1.eax,
+            r1.ebx,
+            r1.ecx,
+            r1.edx,
+            r2.eax,
+            r2.ebx,
+            r2.ecx,
+            r2.edx,
+            r3.eax,
+            r3.ebx,
+            r3.ecx,
+            r3.edx,
+        ).decode("utf-8")
+        return result.strip("\x00")
+

 @detection(operating_system="Windows")
 def cpuid_info():

@@ -174,8 +205,8 @@ def _check_output(args, env):

 WINDOWS_MAPPING = {
-    "AMD64": "x86_64",
-    "ARM64": "aarch64",
+    "AMD64": X86_64,
+    "ARM64": AARCH64,
 }

@@ -320,6 +351,10 @@ def sorting_fn(item):

     generic_candidates = [c for c in candidates if c.vendor == "generic"]
     best_generic = max(generic_candidates, key=sorting_fn)

+    # Relevant for AArch64. Filter on "cpu_part" if we have any match
+    if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
+        candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
+
     # Filter the candidates to be descendant of the best generic candidate.
     # This is to avoid that the lack of a niche feature that can be disabled
     # from e.g. BIOS prevents detection of a reasonably performant architecture

@@ -409,3 +444,16 @@ def compatibility_check_for_riscv64(info, target):

     return (target == arch_root or arch_root in target.ancestors) and (
         target.name == info.name or target.vendor == "generic"
     )
+
+
+def brand_string() -> Optional[str]:
+    """Returns the brand string of the host, if detected, or None."""
+    if platform.system() == "Darwin":
+        return _check_output(
+            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
+        ).strip()
+
+    if host().family == X86_64:
+        return CpuidInfoCollector().brand_string()
+
+    return None
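Once vendored, the new helper is reachable from a Spack prompt. A quick sketch (the
printed brand string below is a made-up example; yours will differ):

.. code-block:: console

   $ spack python -c 'import archspec.cpu; print(archspec.cpu.brand_string())'
   Intel(R) Xeon(R) Platinum 8368 CPU @ 2.40GHz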
lib/spack/external/archspec/cpu/microarchitecture.py (vendored)

@@ -2,9 +2,7 @@
 # Archspec Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Types and functions to manage information
-on CPU microarchitectures.
-"""
+"""Types and functions to manage information on CPU microarchitectures."""
 import functools
 import platform
 import re

@@ -65,23 +63,31 @@ class Microarchitecture:

             passed in as argument above.
             * versions: versions that support this micro-architecture.

-        generation (int): generation of the micro-architecture, if
-            relevant.
+        generation (int): generation of the micro-architecture, if relevant.
+        cpu_part (str): cpu part of the architecture, if relevant.
     """

-    # pylint: disable=too-many-arguments
+    # pylint: disable=too-many-arguments,too-many-instance-attributes
     #: Aliases for micro-architecture's features
     feature_aliases = FEATURE_ALIASES

-    def __init__(self, name, parents, vendor, features, compilers, generation=0):
+    def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
         self.name = name
         self.parents = parents
         self.vendor = vendor
         self.features = features
         self.compilers = compilers
+        # Only relevant for PowerPC
         self.generation = generation
-        # Cache the ancestor computation
+        # Only relevant for AArch64
+        self.cpu_part = cpu_part
+
+        # Cache the "ancestor" computation
         self._ancestors = None
+        # Cache the "generic" computation
+        self._generic = None
+        # Cache the "family" computation
+        self._family = None

     @property
     def ancestors(self):

@@ -111,8 +117,12 @@ def __eq__(self, other):

             and self.parents == other.parents  # avoid ancestors here
             and self.compilers == other.compilers
             and self.generation == other.generation
+            and self.cpu_part == other.cpu_part
         )

+    def __hash__(self):
+        return hash(self.name)
+
     @coerce_target_names
     def __ne__(self, other):
         return not self == other

@@ -143,7 +153,8 @@ def __repr__(self):

         cls_name = self.__class__.__name__
         fmt = (
             cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
-            "{0.features!r}, {0.compilers!r}, {0.generation!r})"
+            "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
+            "cpu_part={0.cpu_part!r})"
         )
         return fmt.format(self)

@@ -168,18 +179,22 @@ def __contains__(self, feature):

     @property
     def family(self):
         """Returns the architecture family a given target belongs to"""
-        roots = [x for x in [self] + self.ancestors if not x.ancestors]
-        msg = "a target is expected to belong to just one architecture family"
-        msg += f"[found {', '.join(str(x) for x in roots)}]"
-        assert len(roots) == 1, msg
+        if self._family is None:
+            roots = [x for x in [self] + self.ancestors if not x.ancestors]
+            msg = "a target is expected to belong to just one architecture family"
+            msg += f"[found {', '.join(str(x) for x in roots)}]"
+            assert len(roots) == 1, msg
+            self._family = roots.pop()

-        return roots.pop()
+        return self._family

     @property
     def generic(self):
         """Returns the best generic architecture that is compatible with self"""
-        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
-        return max(generics, key=lambda x: len(x.ancestors))
+        if self._generic is None:
+            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+            self._generic = max(generics, key=lambda x: len(x.ancestors))
+        return self._generic
def to_dict(self):
|
||||
"""Returns a dictionary representation of this object."""
|
||||
@@ -190,6 +205,7 @@ def to_dict(self):
|
||||
"generation": self.generation,
|
||||
"parents": [str(x) for x in self.parents],
|
||||
"compilers": self.compilers,
|
||||
"cpupart": self.cpu_part,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@@ -202,12 +218,15 @@ def from_dict(data) -> "Microarchitecture":
|
||||
features=set(data["features"]),
|
||||
compilers=data.get("compilers", {}),
|
||||
generation=data.get("generation", 0),
|
||||
cpu_part=data.get("cpupart", ""),
|
||||
)
|
||||
|
||||
def optimization_flags(self, compiler, version):
|
||||
"""Returns a string containing the optimization flags that needs
|
||||
to be used to produce code optimized for this micro-architecture.
|
||||
|
||||
The version is expected to be a string of dot separated digits.
|
||||
|
||||
If there is no information on the compiler passed as argument the
|
||||
function returns an empty string. If it is known that the compiler
|
||||
version we want to use does not support this architecture the function
|
||||
@@ -216,6 +235,11 @@ def optimization_flags(self, compiler, version):
|
||||
Args:
|
||||
compiler (str): name of the compiler to be used
|
||||
version (str): version of the compiler to be used
|
||||
|
||||
Raises:
|
||||
UnsupportedMicroarchitecture: if the requested compiler does not support
|
||||
this micro-architecture.
|
||||
ValueError: if the version doesn't match the expected format
|
||||
"""
|
||||
# If we don't have information on compiler at all return an empty string
|
||||
if compiler not in self.family.compilers:
|
||||
@@ -232,6 +256,14 @@ def optimization_flags(self, compiler, version):
|
||||
msg = msg.format(compiler, best_target, best_target.family)
|
||||
raise UnsupportedMicroarchitecture(msg)
|
||||
|
||||
# Check that the version matches the expected format
|
||||
if not re.match(r"^(?:\d+\.)*\d+$", version):
|
||||
msg = (
|
||||
"invalid format for the compiler version argument. "
|
||||
"Only dot separated digits are allowed."
|
||||
)
|
||||
raise InvalidCompilerVersion(msg)
|
||||
|
||||
# If we have information on this compiler we need to check the
|
||||
# version being used
|
||||
compiler_info = self.compilers[compiler]
|
||||
@@ -292,7 +324,7 @@ def generic_microarchitecture(name):
|
||||
Args:
|
||||
name (str): name of the micro-architecture
|
||||
"""
|
||||
return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
|
||||
return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
|
||||
|
||||
|
||||
def version_components(version):
|
||||
@@ -345,8 +377,11 @@ def fill_target_from_dict(name, data, targets):
|
||||
features = set(values["features"])
|
||||
compilers = values.get("compilers", {})
|
||||
generation = values.get("generation", 0)
|
||||
cpu_part = values.get("cpupart", "")
|
||||
|
||||
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
|
||||
targets[name] = Microarchitecture(
|
||||
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
|
||||
)
|
||||
|
||||
known_targets = {}
|
||||
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
|
||||
@@ -367,7 +402,15 @@ def fill_target_from_dict(name, data, targets):
|
||||
TARGETS = LazyDictionary(_known_microarchitectures)
|
||||
|
||||
|
||||
class UnsupportedMicroarchitecture(ValueError):
|
||||
class ArchspecError(Exception):
|
||||
"""Base class for errors within archspec"""
|
||||
|
||||
|
||||
class UnsupportedMicroarchitecture(ArchspecError, ValueError):
|
||||
"""Raised if a compiler version does not support optimization for a given
|
||||
micro-architecture.
|
||||
"""
|
||||
|
||||
|
||||
class InvalidCompilerVersion(ArchspecError, ValueError):
|
||||
"""Raised when an invalid format is used for compiler versions in archspec."""
|
||||
|
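The cpu_part plumbing and the new version check above are easiest to see end to end. A minimal sketch, assuming the archspec version vendored in this diff (the target name and versions are illustrative; InvalidCompilerVersion subclasses ValueError, so catching ValueError suffices):

import archspec.cpu as cpu

target = cpu.TARGETS["neoverse_v2"]
print(target.cpu_part)                           # "0xd4f", round-tripped by to_dict()/from_dict()
print(target.optimization_flags("gcc", "12.3"))  # "-mcpu=neoverse-v2" per the tables below

try:
    target.optimization_flags("gcc", "12.3-broken")
except ValueError:
    # rejected by the new dot-separated-digits check (InvalidCompilerVersion)
    pass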
@@ -1482,7 +1482,6 @@
      "cldemote",
      "movdir64b",
      "movdiri",
      "pdcm",
      "serialize",
      "waitpkg"
    ],
@@ -2225,14 +2224,96 @@
      ],
      "nvhpc": [
        {
          "versions": "21.11:",
          "versions": "21.11:23.8",
          "name": "zen3",
          "flags": "-tp {name}",
          "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
          "warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
        },
        {
          "versions": "23.9:",
          "flags": "-tp {name}"
        }
      ]
    }
  },
  "zen5": {
    "from": ["zen4"],
    "vendor": "AuthenticAMD",
    "features": [
      "abm",
      "aes",
      "avx",
      "avx2",
      "avx512_bf16",
      "avx512_bitalg",
      "avx512bw",
      "avx512cd",
      "avx512dq",
      "avx512f",
      "avx512ifma",
      "avx512vbmi",
      "avx512_vbmi2",
      "avx512vl",
      "avx512_vnni",
      "avx512_vp2intersect",
      "avx512_vpopcntdq",
      "avx_vnni",
      "bmi1",
      "bmi2",
      "clflushopt",
      "clwb",
      "clzero",
      "cppc",
      "cx16",
      "f16c",
      "flush_l1d",
      "fma",
      "fsgsbase",
      "gfni",
      "ibrs_enhanced",
      "mmx",
      "movbe",
      "movdir64b",
      "movdiri",
      "pclmulqdq",
      "popcnt",
      "rdseed",
      "sse",
      "sse2",
      "sse4_1",
      "sse4_2",
      "sse4a",
      "ssse3",
      "tsc_adjust",
      "vaes",
      "vpclmulqdq",
      "xsavec",
      "xsaveopt"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "14.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "aocc": [
        {
          "versions": "5.0:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "19.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
  "ppc64": {
    "from": [],
    "vendor": "generic",
@@ -2711,7 +2792,8 @@
          "flags": "-mcpu=thunderx2t99"
        }
      ]
    }
    },
    "cpupart": "0x0af"
  },
  "a64fx": {
    "from": ["armv8.2a"],
@@ -2779,7 +2861,8 @@
          "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
        }
      ]
    }
    },
    "cpupart": "0x001"
  },
  "cortex_a72": {
    "from": ["aarch64"],
@@ -2816,7 +2899,8 @@
          "flags" : "-mcpu=cortex-a72"
        }
      ]
    }
    },
    "cpupart": "0xd08"
  },
  "neoverse_n1": {
    "from": ["cortex_a72", "armv8.2a"],
@@ -2837,8 +2921,7 @@
      "asimdrdm",
      "lrcpc",
      "dcpop",
      "asimddp",
      "ssbs"
      "asimddp"
    ],
    "compilers" : {
      "gcc": [
@@ -2902,7 +2985,8 @@
          "flags": "-tp {name}"
        }
      ]
    }
    },
    "cpupart": "0xd0c"
  },
  "neoverse_v1": {
    "from": ["neoverse_n1", "armv8.4a"],
@@ -2926,8 +3010,6 @@
      "lrcpc",
      "dcpop",
      "sha3",
      "sm3",
      "sm4",
      "asimddp",
      "sha512",
      "sve",
@@ -2936,9 +3018,6 @@
      "uscat",
      "ilrcpc",
      "flagm",
      "ssbs",
      "paca",
      "pacg",
      "dcpodp",
      "svei8mm",
      "svebf16",
@@ -3006,7 +3085,7 @@
        },
        {
          "versions": "11:",
          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
          "flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
        },
        {
          "versions": "12:",
@@ -3030,7 +3109,8 @@
          "flags": "-tp {name}"
        }
      ]
    }
    },
    "cpupart": "0xd40"
  },
  "neoverse_v2": {
    "from": ["neoverse_n1", "armv9.0a"],
@@ -3054,35 +3134,22 @@
      "lrcpc",
      "dcpop",
      "sha3",
      "sm3",
      "sm4",
      "asimddp",
      "sha512",
      "sve",
      "asimdfhm",
      "dit",
      "uscat",
      "ilrcpc",
      "flagm",
      "ssbs",
      "sb",
      "paca",
      "pacg",
      "dcpodp",
      "sve2",
      "sveaes",
      "svepmull",
      "svebitperm",
      "svesha3",
      "svesm4",
      "flagm2",
      "frint",
      "svei8mm",
      "svebf16",
      "i8mm",
      "bf16",
      "dgh",
      "bti"
      "bf16"
    ],
    "compilers" : {
      "gcc": [
@@ -3107,15 +3174,19 @@
          "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
        },
        {
          "versions": "10.0:11.99",
          "versions": "10.0:11.3.99",
          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
        },
        {
          "versions": "11.4:11.99",
          "flags" : "-mcpu=neoverse-v2"
        },
        {
          "versions": "12.0:12.99",
          "versions": "12.0:12.2.99",
          "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
        },
        {
          "versions": "13.0:",
          "versions": "12.3:",
          "flags" : "-mcpu=neoverse-v2"
        }
      ],
@@ -3150,7 +3221,112 @@
          "flags": "-tp {name}"
        }
      ]
    }
    },
    "cpupart": "0xd4f"
  },
  "neoverse_n2": {
    "from": ["neoverse_n1", "armv9.0a"],
    "vendor": "ARM",
    "features": [
      "fp",
      "asimd",
      "evtstrm",
      "aes",
      "pmull",
      "sha1",
      "sha2",
      "crc32",
      "atomics",
      "fphp",
      "asimdhp",
      "cpuid",
      "asimdrdm",
      "jscvt",
      "fcma",
      "lrcpc",
      "dcpop",
      "sha3",
      "asimddp",
      "sha512",
      "sve",
      "asimdfhm",
      "uscat",
      "ilrcpc",
      "flagm",
      "sb",
      "dcpodp",
      "sve2",
      "flagm2",
      "frint",
      "svei8mm",
      "svebf16",
      "i8mm",
      "bf16"
    ],
    "compilers" : {
      "gcc": [
        {
          "versions": "4.8:5.99",
          "flags": "-march=armv8-a"
        },
        {
          "versions": "6:6.99",
          "flags" : "-march=armv8.1-a"
        },
        {
          "versions": "7.0:7.99",
          "flags" : "-march=armv8.2-a -mtune=cortex-a72"
        },
        {
          "versions": "8.0:8.99",
          "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
        },
        {
          "versions": "9.0:9.99",
          "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
        },
        {
          "versions": "10.0:10.99",
          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
        },
        {
          "versions": "11.0:",
          "flags" : "-mcpu=neoverse-n2"
        }
      ],
      "clang" : [
        {
          "versions": "9.0:10.99",
          "flags" : "-march=armv8.5-a+sve"
        },
        {
          "versions": "11.0:13.99",
          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
        },
        {
          "versions": "14.0:15.99",
          "flags" : "-march=armv9-a+i8mm+bf16"
        },
        {
          "versions": "16.0:",
          "flags" : "-mcpu=neoverse-n2"
        }
      ],
      "arm" : [
        {
          "versions": "23.04.0:",
          "flags" : "-mcpu=neoverse-n2"
        }
      ],
      "nvhpc" : [
        {
          "versions": "23.3:",
          "name": "neoverse-n1",
          "flags": "-tp {name}"
        }
      ]
    },
    "cpupart": "0xd49"
  },
  "m1": {
    "from": ["armv8.4a"],
@@ -3216,7 +3392,8 @@
          "flags" : "-mcpu=apple-m1"
        }
      ]
    }
    },
    "cpupart": "0x022"
  },
  "m2": {
    "from": ["m1", "armv8.5a"],
@@ -3294,7 +3471,8 @@
          "flags" : "-mcpu=apple-m2"
        }
      ]
    }
    },
    "cpupart": "0x032"
  },
  "arm": {
    "from": [],
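A hedged sketch of how these JSON entries are consumed: each compiler list is scanned for an entry whose version range contains the requested version, and {name} is substituted into the flag template (output strings inferred from the entries above):

import archspec.cpu as cpu

print(cpu.TARGETS["zen5"].optimization_flags("gcc", "14.1"))    # "-march=znver5 -mtune=znver5"
print(cpu.TARGETS["zen4"].optimization_flags("nvhpc", "23.9"))  # "-tp zen4" once "23.9:" matches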
@@ -52,6 +52,9 @@
            }
          }
        }
      },
      "cpupart": {
        "type": "string"
      }
    },
    "required": [
@@ -107,4 +110,4 @@
    "additionalProperties": false
    }
  }
}
}
lib/spack/external/patches/ruamelyaml.patch (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                     Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
 # fmt: on
@@ -98,3 +98,10 @@ def path_filter_caller(*args, **kwargs):
    if _func:
        return holder_func(_func)
    return holder_func


def sanitize_win_longpath(path: str) -> str:
    """Strip Windows extended path prefix from strings
    Returns sanitized string.
    no-op if extended path prefix is not present"""
    return path.lstrip("\\\\?\\")
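A small usage sketch for the helper just added (the module location is inferred from the relative import in the symlink hunk further down, so treat it as an assumption):

from llnl.path import sanitize_win_longpath

assert sanitize_win_longpath("\\\\?\\C:\\spack\\opt") == "C:\\spack\\opt"
assert sanitize_win_longpath("C:\\spack\\opt") == "C:\\spack\\opt"  # no-op without the prefix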
@@ -12,7 +12,7 @@
# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")

# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
@@ -357,10 +357,8 @@ def strip_version_suffixes(path_or_url: str) -> str:
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)?",
        # PyPI
        r"[._-]py[23].*\.whl",
        r"[._-]cp[23].*\.whl",
        r"[._-]win.*\.exe",
        # PyPI wheels
        r"-(?:py|cp)[23].*",
    ]

    for regex in suffix_regexes:
@@ -403,7 +401,7 @@ def expand_contracted_extension_in_path(
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
    """Returns compression extension for a compressed archive"""
    extension = expand_contracted_extension(extension)
    for ext in [*EXTENSIONS]:
    for ext in EXTENSIONS:
        if ext in extension:
            return ext
    return None
@@ -187,26 +187,58 @@ def polite_filename(filename: str) -> str:
    return _polite_antipattern().sub("_", filename)


def getuid():
def getuid() -> Union[str, int]:
    """Returns os getuid on non Windows
    On Windows returns 0 for admin users, login string otherwise
    This is in line with behavior from get_owner_uid which
    always returns the login string on Windows
    """
    if sys.platform == "win32":
        import ctypes

        # If not admin, use the string name of the login as a unique ID
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return 1
            return os.getlogin()
        return 0
    else:
        return os.getuid()


def _win_rename(src, dst):
    # os.replace will still fail if on Windows (but not POSIX) if the dst
    # is a symlink to a directory (all other cases have parity Windows <-> Posix)
    if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
        if os.path.samefile(src, dst):
            # src and dst are the same
            # do nothing and exit early
            return
        # If dst exists and is a symlink to a directory
        # we need to remove dst and then perform rename/replace
        # this is safe to do as there's no chance src == dst now
        os.remove(dst)
    os.replace(src, dst)


@system_path_filter
def msdos_escape_parens(path):
    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
    if sys.platform == "win32":
        return path.replace("(", "^(").replace(")", "^)")
    else:
        return path


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
    # os.replace is the same as os.rename on POSIX and is MoveFileExW w/
    # the MOVEFILE_REPLACE_EXISTING flag on Windows
    # Windows invocation is abstracted behind additional logic handling
    # remaining cases of divergent behavior across platforms
    if sys.platform == "win32":
        # Windows path existence checks will sometimes fail on junctions/links/symlinks
        # so check for that case
        if os.path.exists(dst) or islink(dst):
            os.remove(dst)
        os.rename(src, dst)
        _win_rename(src, dst)
    else:
        os.replace(src, dst)


@system_path_filter
@@ -536,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):


@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
    """Returns owner UID of path destination
    On non Windows this is the value of st_uid
    On Windows this is the login string associated with the
    owning user.

    """
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)

@@ -805,7 +843,7 @@ def copy_tree(
        if islink(s):
            link_target = resolve_link_target_relative_to_the_link(s)
            if symlinks:
                target = os.readlink(s)
                target = readlink(s)
                if os.path.isabs(target):

def escaped_path(path):
@@ -1217,10 +1255,12 @@ def windows_sfn(path: os.PathLike):
    import ctypes

    k32 = ctypes.WinDLL("kernel32", use_last_error=True)
    # Method with null values returns size of short path name
    sz = k32.GetShortPathNameW(path, None, 0)
    # stub Windows types TCHAR[LENGTH]
    TCHAR_arr = ctypes.c_wchar * len(path)
    TCHAR_arr = ctypes.c_wchar * sz
    ret_str = TCHAR_arr()
    k32.GetShortPathNameW(path, ret_str, len(path))
    k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
    return ret_str.value


@@ -2410,9 +2450,10 @@ def add_library_dependent(self, *dest):
        """
        for pth in dest:
            if os.path.isfile(pth):
                self._additional_library_dependents.add(pathlib.Path(pth).parent)
                new_pth = pathlib.Path(pth).parent
            else:
                self._additional_library_dependents.add(pathlib.Path(pth))
                new_pth = pathlib.Path(pth)
            self._additional_library_dependents.add(new_pth)

    @property
    def rpaths(self):
@@ -2490,8 +2531,14 @@ def establish_link(self):

        # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
        # install a symlink to each dependent library
        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
            self._link(library, lib_dir)

        # do not rpath for system libraries included in the dag
        # we should not be modifying libraries managed by the Windows system
        # as this will negatively impact linker behavior and can result in permission
        # errors if those system libs are not modifiable by Spack
        if "windows-system" not in getattr(self.pkg, "tags", []):
            for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
                self._link(library, lib_dir)


@system_path_filter
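The net effect of rename/_win_rename above is os.replace semantics on every platform. An illustrative use, assuming llnl.util.filesystem is the module these hunks belong to:

import pathlib
from llnl.util.filesystem import rename

pathlib.Path("a.txt").write_text("new")
pathlib.Path("b.txt").write_text("old")
rename("a.txt", "b.txt")  # overwrites b.txt; plain os.rename would fail here on Windows
assert pathlib.Path("b.txt").read_text() == "new"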
@@ -11,7 +11,7 @@

from llnl.util import lang, tty

from ..path import system_path_filter
from ..path import sanitize_win_longpath, system_path_filter

if sys.platform == "win32":
    from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)
        err_str = err.decode()
        tty.error(err_str)
        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)


def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
    CreateHardLink(link, path)


def readlink(path: str):
def readlink(path: str, *, dir_fd=None):
    """Spack utility overriding os.readlink to work cross-platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return os.readlink(path)
        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))


def _windows_read_hard_link(link: str) -> str:
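Sketch of the readlink override in use; on Windows it also resolves hard links and junctions, and now strips the \\?\ long-path prefix (the symlink helper used here is assumed to live in the same module):

import os
from llnl.util.symlink import readlink, symlink

os.makedirs("target_dir", exist_ok=True)
symlink("target_dir", "link_dir")  # may fall back to a junction on Windows
print(readlink("link_dir"))        # "target_dir", already sanitized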
@@ -12,7 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn
from typing import Any, NoReturn

if _platform != "win32":
    import fcntl
@@ -158,21 +158,22 @@ def get_timestamp(force=False):
    return ""


def msg(message, *args, **kwargs):
def msg(message: Any, *args: Any, newline: bool = True) -> None:
    if not msg_enabled():
        return

    if isinstance(message, Exception):
        message = "%s: %s" % (message.__class__.__name__, str(message))
        message = f"{message.__class__.__name__}: {message}"
    else:
        message = str(message)

    newline = kwargs.get("newline", True)
    st_text = ""
    if _stacktrace:
        st_text = process_stacktrace(2)
    if newline:
        cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
    else:
        cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))

    nl = "\n" if newline else ""
    cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")

    for arg in args:
        print(indent + _output_filter(str(arg)))
@@ -237,7 +237,6 @@ def transpose():
def colified(
    elts: List[Any],
    cols: int = 0,
    output: Optional[IO] = None,
    indent: int = 0,
    padding: int = 2,
    tty: Optional[bool] = None,
@@ -59,9 +59,11 @@

To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
import os
import re
import sys
from contextlib import contextmanager
from typing import Optional


class ColorParseError(Exception):
@@ -95,14 +97,34 @@ def __init__(self, message):
}  # white

# Regex to be used for color formatting
color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")

# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}

# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
_force_color = None

def _color_when_value(when):
    """Raise a ValueError for an invalid color setting.

    Valid values are 'always', 'never', and 'auto', or equivalently,
    True, False, and None.
    """
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
    return when


def _color_from_environ() -> Optional[bool]:
    try:
        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
    except ValueError:
        return None


#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()


def try_enable_terminal_color_on_windows():
@@ -163,19 +185,6 @@ def _err_check(result, func, args):
    debug("Unable to support color on Windows terminal")


def _color_when_value(when):
    """Raise a ValueError for an invalid color setting.

    Valid values are 'always', 'never', and 'auto', or equivalently,
    True, False, and None.
    """
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
    return when


def get_color_when():
    """Return whether commands should print color or not."""
    if _force_color is not None:
@@ -203,77 +212,64 @@ def color_when(value):
    set_color_when(old_value)


class match_to_ansi:
    def __init__(self, color=True, enclose=False, zsh=False):
        self.color = _color_when_value(color)
        self.enclose = enclose
        self.zsh = zsh

    def escape(self, s):
        """Returns a TTY escape sequence for a color"""
        if self.color:
            if self.zsh:
                result = rf"\e[0;{s}m"
            else:
                result = f"\033[{s}m"

            if self.enclose:
                result = rf"\[{result}\]"

            return result
def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
    """Returns a TTY escape sequence for a color"""
    if color:
        if zsh:
            result = rf"\e[0;{s}m"
        else:
            return ""
            result = f"\033[{s}m"

    def __call__(self, match):
        """Convert a match object generated by ``color_re`` into an ansi
        color code. This can be used as a handler in ``re.sub``.
        """
        style, color, text = match.groups()
        m = match.group(0)
        if enclose:
            result = rf"\[{result}\]"

        if m == "@@":
            return "@"
        elif m == "@.":
            return self.escape(0)
        elif m == "@":
            raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))

        string = styles[style]
        if color:
            if color not in colors:
                raise ColorParseError(
                    "Invalid color specifier: '%s' in '%s'" % (color, match.string)
                )
            string += ";" + str(colors[color])

        colored_text = ""
        if text:
            colored_text = text + self.escape(0)

        return self.escape(string) + colored_text
        return result
    else:
        return ""


def colorize(string, **kwargs):
def colorize(
    string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
) -> str:
    """Replace all color expressions in a string with ANSI control codes.

    Args:
        string (str): The string to replace
        string: The string to replace

    Returns:
        str: The filtered string
        The filtered string

    Keyword Arguments:
        color (bool): If False, output will be plain text without control
            codes, for output to non-console devices.
        enclose (bool): If True, enclose ansi color sequences with
        color: If False, output will be plain text without control codes, for output to
            non-console devices (default: automatically choose color or not)
        enclose: If True, enclose ansi color sequences with
            square brackets to prevent misestimation of terminal width.
        zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
        zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
    """
    color = _color_when_value(kwargs.get("color", get_color_when()))
    zsh = kwargs.get("zsh", False)
    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
    string = string.replace("}}", "}")
    return string
    color = color if color is not None else get_color_when()

    def match_to_ansi(match):
        """Convert a match object generated by ``COLOR_RE`` into an ansi
        color code. This can be used as a handler in ``re.sub``.
        """
        escaped_at, dot, style, color_code, text = match.groups()

        if escaped_at:
            return "@"
        elif dot:
            return _escape(0, color, enclose, zsh)
        elif not (style or color_code):
            raise ColorParseError(
                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
            )

        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
        if text:
            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
        else:
            return ansi_code

    return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")


def clen(string):
@@ -305,7 +301,7 @@ def cprint(string, stream=None, color=None):
    cwrite(string + "\n", stream, color)


def cescape(string):
def cescape(string: str) -> str:
    """Escapes special characters needed for color codes.

    Replaces the following symbols with their equivalent literal forms:
@@ -321,10 +317,7 @@ def cescape(string):
    Returns:
        (str): the string with color codes escaped
    """
    string = str(string)
    string = string.replace("@", "@@")
    string = string.replace("}", "}}")
    return string
    return string.replace("@", "@@").replace("}", "}}")


class ColorStream:
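Usage sketch for the rewritten colorize(); the markup follows the module docstring quoted above ('@@' for a literal @, '}}' for a literal } inside braces):

from llnl.util.tty.color import cescape, colorize

print(colorize("@*b{==>} done"))        # bold blue "==>" followed by plain text
print(colorize("@g{ok}", color=False))  # "ok" -- plain text when color is off
user_text = "100% @ node}"
print(colorize(f"@r{{{cescape(user_text)}}}"))  # cescape doubles '@' and '}' first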
@@ -18,9 +18,10 @@
import threading
import traceback
from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread
from types import ModuleType
from typing import Optional
from typing import Callable, Optional

import llnl.util.tty as tty

@@ -329,49 +330,6 @@ def close(self):
        self.file.close()


class MultiProcessFd:
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""

    def __init__(self, fd):
        self._connection = None
        self._fd = None
        if sys.version_info >= (3, 8):
            self._connection = multiprocessing.connection.Connection(fd)
        else:
            self._fd = fd

    @property
    def fd(self):
        if self._connection:
            return self._connection._handle
        else:
            return self._fd

    def close(self):
        if self._connection:
            self._connection.close()
        else:
            os.close(self._fd)


def close_connection_and_file(multiprocess_fd, file):
    # MultiprocessFd is intended to transmit a FD
    # to a child process, this FD is then opened to a Python File object
    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
    # multiprocessing.connection.Connection; Connection closes the FD
    # when it is deleted, and prints a warning about duplicate closure if
    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
    # simple FD; closing the FD here appears to conflict with
    # closure of the File object (in < 3.8 that is). Therefore this needs
    # to choose whether to close the File or the Connection.
    if sys.version_info >= (3, 8):
        multiprocess_fd.close()
    else:
        file.close()


@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.
@@ -529,22 +487,20 @@ def __enter__(self):
        # forcing debug output.
        self._saved_debug = tty._debug

        # OS-level pipe for redirecting output to logger
        read_fd, write_fd = os.pipe()
        # Pipe for redirecting output to logger
        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

        read_multiprocess_fd = MultiProcessFd(read_fd)

        # Multiprocessing pipe for communication back from the daemon
        # Pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent_pipe, child_pipe = multiprocessing.Pipe()
        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_multiprocess_fd = None
            input_fd = None
            try:
                if sys.stdin.isatty():
                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
                    input_fd = Connection(os.dup(sys.stdin.fileno()))
            except BaseException:
                # just don't forward input if this fails
                pass
@@ -553,9 +509,9 @@ def __enter__(self):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_multiprocess_fd,
                    read_multiprocess_fd,
                    write_fd,
                    input_fd,
                    read_fd,
                    self.write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
@@ -566,9 +522,9 @@ def __enter__(self):
            self.process.start()

        finally:
            if input_multiprocess_fd:
                input_multiprocess_fd.close()
            read_multiprocess_fd.close()
            if input_fd:
                input_fd.close()
            read_fd.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
@@ -586,9 +542,9 @@ def __enter__(self):
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(write_fd, sys.stdout.fileno())
            os.dup2(write_fd, sys.stderr.fileno())
            os.close(write_fd)
            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
            self.write_fd.close()

        else:
            # Handle I/O the Python way. This won't redirect lower-level
@@ -601,7 +557,7 @@ def __enter__(self):
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(write_fd, "w")
            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

@@ -637,6 +593,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
            self.write_fd.close()

        # print log contents in parent if needed.
        if self.log_file.write_in_parent:
@@ -850,14 +807,14 @@ def force_echo(self):


def _writer_daemon(
    stdin_multiprocess_fd,
    read_multiprocess_fd,
    write_fd,
    echo,
    log_file_wrapper,
    control_pipe,
    filter_fn,
):
    stdin_fd: Optional[Connection],
    read_fd: Connection,
    write_fd: Connection,
    echo: bool,
    log_file_wrapper: FileWrapper,
    control_fd: Connection,
    filter_fn: Optional[Callable[[str], str]],
) -> None:
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -894,43 +851,37 @@ def _writer_daemon(
    ``StringIO`` in the parent. This is mainly for testing.

    Arguments:
        stdin_multiprocess_fd (int): input from the terminal
        read_multiprocess_fd (int): pipe for reading from parent's redirected
            stdout
        echo (bool): initial echo setting -- controlled by user and
            preserved across multiple writer daemons
        log_file_wrapper (FileWrapper): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent
        filter_fn (callable, optional): function to filter each line of output
        stdin_fd: optional input from the terminal
        read_fd: pipe for reading from parent's redirected stdout
        echo: initial echo setting -- controlled by user and preserved across multiple writer
            daemons
        log_file_wrapper: file to log all output
        control_pipe: multiprocessing pipe on which to send control information to the parent
        filter_fn: optional function to filter each line of output

    """
    # If this process was forked, then it will inherit file descriptors from
    # the parent process. This process depends on closing all instances of
    # write_fd to terminate the reading loop, so we close the file descriptor
    # here. Forking is the process spawning method everywhere except Mac OS
    # for Python >= 3.8 and on Windows
    if sys.version_info < (3, 8) or sys.platform != "darwin":
        os.close(write_fd)
    # This process depends on closing all instances of write_pipe to terminate the reading loop
    write_fd.close()

    # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
    # that prevents unbuffered text I/O.
    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
    # 3. closefd=False because Connection has "ownership"
    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

    if stdin_multiprocess_fd:
        stdin = os.fdopen(stdin_multiprocess_fd.fd)
    if stdin_fd:
        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
    else:
        stdin = None
        stdin_file = None

    # list of streams to select from
    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    istreams = [read_file, stdin_file] if stdin_file else [read_file]
    force_echo = False  # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

    try:
        with keyboard_input(stdin) as kb:
        with keyboard_input(stdin_file) as kb:
            while True:
                # fix the terminal settings if we recently came to
                # the foreground
@@ -943,12 +894,12 @@ def _writer_daemon(
                # Allow user to toggle echo with 'v' key.
                # Currently ignores other chars.
                # only read stdin if we're in the foreground
                if stdin in rlist and not _is_background_tty(stdin):
                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                    # it's possible to be backgrounded between the above
                    # check and the read, so we ignore SIGTTIN here.
                    with ignore_signal(signal.SIGTTIN):
                        try:
                            if stdin.read(1) == "v":
                            if stdin_file.read(1) == "v":
                                echo = not echo
                        except IOError as e:
                            # If SIGTTIN is ignored, the system gives EIO
@@ -957,13 +908,13 @@ def _writer_daemon(
                            if e.errno != errno.EIO:
                                raise

                if in_pipe in rlist:
                if read_file in rlist:
                    line_count = 0
                    try:
                        while line_count < 100:
                            # Handle output from the calling process.
                            try:
                                line = _retry(in_pipe.readline)()
                                line = _retry(read_file.readline)()
                            except UnicodeDecodeError:
                                # installs like --test=root gpgme produce non-UTF8 logs
                                line = "<line lost: output was not encoded as UTF-8>\n"
@@ -992,7 +943,7 @@ def _writer_daemon(
                            if xoff in controls:
                                force_echo = False

                            if not _input_available(in_pipe):
                            if not _input_available(read_file):
                                break
                    finally:
                        if line_count > 0:
@@ -1007,14 +958,14 @@ def _writer_daemon(
    finally:
        # send written data back to parent if we used a StringIO
        if isinstance(log_file, io.StringIO):
            control_pipe.send(log_file.getvalue())
            control_fd.send(log_file.getvalue())
        log_file_wrapper.close()
        close_connection_and_file(read_multiprocess_fd, in_pipe)
        if stdin_multiprocess_fd:
            close_connection_and_file(stdin_multiprocess_fd, stdin)
        read_fd.close()
        if stdin_fd:
            stdin_fd.close()

        # send echo value back to the parent so it can be preserved.
        control_pipe.send(echo)
        control_fd.send(echo)


def _retry(function):
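The MultiProcessFd removal boils down to passing multiprocessing Connection objects directly and opening them with closefd=False so the Connection keeps descriptor ownership. A standalone POSIX sketch of that pattern (not Spack's actual daemon, just the idea):

import multiprocessing
import os

def child(read_conn):
    # closefd=False: closing the file object must not close the Connection's fd
    with os.fdopen(read_conn.fileno(), "r", closefd=False) as f:
        print(f.readline(), end="")

if __name__ == "__main__":
    read_fd, write_fd = multiprocessing.Pipe(duplex=False)
    proc = multiprocessing.Process(target=child, args=(read_fd,))
    proc.start()
    os.write(write_fd.fileno(), b"hello from the parent\n")
    write_fd.close()  # the reader sees EOF once every writer is closed
    proc.join()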
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.0.dev0"
__version__ = "0.22.3"
spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):

@config_packages
def _deprecated_preferences(error_cls):
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

@@ -779,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
            return
        error = error_cls(
            f"{pkg_name}: {msg}",
            f"remove variants from '{spec}' in depends_on directive in {filename}",
            [f"remove variants from '{spec}' in depends_on directive in {filename}"],
        )
        errors.append(error)

@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
    group="externals",
    tag="PKG-EXTERNALS",
    description="Sanity checks for external software detection",
    kwargs=("pkgs",),
    kwargs=("pkgs", "debug_log"),
)


@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():


@external_detection
def _test_detection_by_executable(pkgs, error_cls):
def _test_detection_by_executable(pkgs, debug_log, error_cls):
    """Test drive external detection for packages"""
    import spack.detection

@@ -1095,6 +1095,7 @@ def _test_detection_by_executable(pkgs, error_cls):
        for idx, test_runner in enumerate(
            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
        ):
            debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
            specs = test_runner.execute()
            expected_specs = test_runner.expected_specs

@@ -1111,4 +1112,75 @@ def _test_detection_by_executable(pkgs, error_cls):
                details = [msg.format(s, idx) for s in sorted(not_expected)]
                errors.append(error_cls(summary=summary, details=details))

            matched_detection = []
            for candidate in expected_specs:
                try:
                    idx = specs.index(candidate)
                    matched_detection.append((candidate, specs[idx]))
                except (AttributeError, ValueError):
                    pass

            def _compare_extra_attribute(_expected, _detected, *, _spec):
                result = []
                # Check items are of the same type
                if not isinstance(_detected, type(_expected)):
                    _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
                    _details = [f"{_detected} was detected instead"]
                    return [error_cls(summary=_summary, details=_details)]

                # If they are string expected is a regex
                if isinstance(_expected, str):
                    try:
                        _regex = re.compile(_expected)
                    except re.error:
                        _summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
                        _details = [f"{_expected} is not a valid regex"]
                        return [error_cls(summary=_summary, details=_details)]

                    if not _regex.match(_detected):
                        _summary = (
                            f'{pkg_name}: error when trying to match "{_expected}" '
                            f"in extra attributes"
                        )
                        _details = [f"{_detected} does not match the regex"]
                        return [error_cls(summary=_summary, details=_details)]

                if isinstance(_expected, dict):
                    _not_detected = set(_expected.keys()) - set(_detected.keys())
                    if _not_detected:
                        _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
                        _details = [
                            f'"{_expected}" was expected',
                            f'"{_detected}" was detected',
                        ] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
                        result.append(error_cls(summary=_summary, details=_details))

                    _common = set(_expected.keys()) & set(_detected.keys())
                    for _key in _common:
                        result.extend(
                            _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                        )

                return result

            for expected, detected in matched_detection:
                # We might not want to test all attributes, so avoid not_expected
                not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
                if not_detected:
                    summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
                    details = [
                        f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
                    ]
                    errors.append(error_cls(summary=summary, details=details))

                common = set(expected.extra_attributes) & set(detected.extra_attributes)
                for key in common:
                    errors.extend(
                        _compare_extra_attribute(
                            expected.extra_attributes[key],
                            detected.extra_attributes[key],
                            _spec=expected,
                        )
                    )

    return errors
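The recursive extra-attributes check above treats expected strings as regexes and expected dicts as required sub-structure. A simplified, standalone version of that rule (not the audit code itself):

import re

def matches(expected, detected) -> bool:
    # dicts require every expected key to exist and match recursively
    if isinstance(expected, dict):
        return isinstance(detected, dict) and all(
            key in detected and matches(value, detected[key]) for key, value in expected.items()
        )
    # strings are interpreted as regexes over the detected value
    return isinstance(detected, str) and re.match(expected, detected) is not None

assert matches({"compilers": {"c": r".*/clang-\d+$"}},
               {"compilers": {"c": "/usr/bin/clang-15"}})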
@@ -17,19 +17,18 @@
|
||||
import tarfile
|
||||
import tempfile
|
||||
import time
|
||||
import traceback
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from contextlib import closing
|
||||
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
|
||||
from llnl.util.symlink import readlink
|
||||
|
||||
import spack.caches
|
||||
import spack.cmd
|
||||
@@ -111,10 +110,6 @@ def __init__(self, errors):
|
||||
super().__init__(self.message)
|
||||
|
||||
|
||||
class ListMirrorSpecsError(spack.error.SpackError):
|
||||
"""Raised when unable to retrieve list of specs from the mirror"""
|
||||
|
||||
|
||||
class BinaryCacheIndex:
|
||||
"""
|
||||
The BinaryCacheIndex tracks what specs are available on (usually remote)
|
||||
@@ -541,83 +536,6 @@ def binary_index_location():
|
||||
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
|
||||
|
||||
|
||||
class NoOverwriteException(spack.error.SpackError):
|
||||
"""Raised when a file would be overwritten"""
|
||||
|
||||
def __init__(self, file_path):
|
||||
super().__init__(f"Refusing to overwrite the following file: {file_path}")
|
||||
|
||||
|
||||
class NoGpgException(spack.error.SpackError):
|
||||
"""
|
||||
Raised when gpg2 is not in PATH
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
|
||||
|
||||
class NoKeyException(spack.error.SpackError):
|
||||
"""
|
||||
Raised when gpg has no default key added.
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
|
||||
|
||||
class PickKeyException(spack.error.SpackError):
|
||||
"""
|
||||
Raised when multiple keys can be used to sign.
|
||||
"""
|
||||
|
||||
def __init__(self, keys):
|
||||
err_msg = "Multiple keys available for signing\n%s\n" % keys
|
||||
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
|
||||
super().__init__(err_msg)
|
||||
|
||||
|
||||
class NoVerifyException(spack.error.SpackError):
|
||||
"""
|
||||
Raised if file fails signature verification.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class NoChecksumException(spack.error.SpackError):
|
||||
"""
|
||||
Raised if file fails checksum verification.
|
||||
"""
|
||||
|
||||
def __init__(self, path, size, contents, algorithm, expected, computed):
|
||||
super().__init__(
|
||||
f"{algorithm} checksum failed for {path}",
|
||||
f"Expected {expected} but got {computed}. "
|
||||
f"File size = {size} bytes. Contents = {contents!r}",
|
||||
)
|
||||
|
||||
|
||||
class NewLayoutException(spack.error.SpackError):
|
||||
"""
|
||||
Raised if directory layout is different from buildcache.
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
|
||||
|
||||
class InvalidMetadataFile(spack.error.SpackError):
|
||||
pass
|
||||
|
||||
|
||||
class UnsignedPackageException(spack.error.SpackError):
|
||||
"""
|
||||
Raised if installation of unsigned package is attempted without
|
||||
the use of ``--no-check-signature``.
|
||||
"""
|
||||
|
||||
|
||||
def compute_hash(data):
|
||||
if isinstance(data, str):
|
||||
data = data.encode("utf-8")
|
||||
@@ -740,7 +658,7 @@ def get_buildfile_manifest(spec):
|
||||
# 2. paths are used as strings.
|
||||
for rel_path in visitor.symlinks:
|
||||
abs_path = os.path.join(root, rel_path)
|
||||
link = os.readlink(abs_path)
|
||||
link = readlink(abs_path)
|
||||
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
|
||||
data["link_to_relocate"].append(rel_path)
|
||||
|
||||
@@ -980,9 +898,8 @@ def url_read_method(url):
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(url)
|
||||
contents = codecs.getreader("utf-8")(spec_file).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error("Error reading specfile: {0}".format(url))
|
||||
tty.error(url_err)
|
||||
except web_util.SpackWebError as e:
|
||||
tty.error(f"Error reading specfile: {url}: {e}")
|
||||
return contents
|
||||
|
||||
try:
|
||||
@@ -992,15 +909,10 @@ def url_read_method(url):
|
||||
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
|
||||
]
|
||||
read_fn = url_read_method
|
||||
except KeyError as inst:
|
||||
msg = "No packages at {0}: {1}".format(cache_prefix, inst)
|
||||
tty.warn(msg)
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
# If we got some kind of S3 (access denied or other connection error), the first non
|
||||
# boto-specific class in the exception is Exception. Just print a warning and return
|
||||
tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")
|
||||
|
||||
return file_list, read_fn
|
||||
|
||||
@@ -1047,11 +959,10 @@ def generate_package_index(cache_prefix, concurrency=32):
|
||||
"""
|
||||
try:
|
||||
file_list, read_fn = _spec_files_from_cache(cache_prefix)
|
||||
except ListMirrorSpecsError as err:
|
||||
tty.error("Unable to generate package index, {0}".format(err))
|
||||
return
|
||||
except ListMirrorSpecsError as e:
|
||||
raise GenerateIndexError(f"Unable to generate package index: {e}") from e
|
||||
|
||||
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
|
||||
tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
@@ -1061,27 +972,22 @@ def generate_package_index(cache_prefix, concurrency=32):
|
||||
|
||||
try:
|
||||
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
|
||||
except Exception as err:
|
||||
msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
tty.debug("\n" + traceback.format_exc())
|
||||
except Exception as e:
|
||||
raise GenerateIndexError(
|
||||
f"Encountered problem pushing package index to {cache_prefix}: {e}"
|
||||
) from e
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
|
||||
|
||||
def generate_key_index(key_prefix, tmpdir=None):
|
||||
"""Create the key index page.
|
||||
|
||||
Creates (or replaces) the "index.json" page at the location given in
|
||||
key_prefix. This page contains an entry for each key (.pub) under
|
||||
key_prefix.
|
||||
Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
|
||||
contains an entry for each key (.pub) under key_prefix.
|
||||
"""
|
||||
|
||||
tty.debug(
|
||||
" ".join(
|
||||
("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
|
||||
)
|
||||
)
|
||||
tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")
|
||||
|
||||
try:
|
||||
fingerprints = (
|
||||
@@ -1089,17 +995,8 @@ def generate_key_index(key_prefix, tmpdir=None):
|
||||
for entry in web_util.list_url(key_prefix, recursive=False)
|
||||
if entry.endswith(".pub")
|
||||
)
|
||||
except KeyError as inst:
|
||||
msg = "No keys at {0}: {1}".format(key_prefix, inst)
|
||||
tty.warn(msg)
|
||||
return
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
|
||||
tty.warn(msg)
|
||||
return
|
||||
except Exception as e:
|
||||
raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e
|
||||
|
||||
remove_tmpdir = False
|
||||
|
||||
@@ -1124,12 +1021,13 @@ def generate_key_index(key_prefix, tmpdir=None):
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "application/json"},
|
||||
)
|
||||
except Exception as err:
|
||||
msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
|
||||
tty.warn(msg)
|
||||
except Exception as e:
|
||||
raise GenerateIndexError(
|
||||
f"Encountered problem pushing key index to {key_prefix}: {e}"
|
||||
) from e
|
||||
finally:
|
||||
if remove_tmpdir:
|
||||
shutil.rmtree(tmpdir)
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
|
||||
|
||||
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1200,7 +1098,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
    used at the mirror (following <tarball_directory_name>).

    This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
    spec.json file already exist in the buildcache.
    spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
    when the tarball or spec.json file cannot be pushed to the buildcache.
    """
    if not spec.concrete:
        raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1177,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    key = select_signing_key(options.key)
    sign_specfile(key, options.force, specfile_path)

    # push tarball and signed spec json to remote mirror
    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
    web_util.push_to_url(
        signed_specfile_path if not options.unsigned else specfile_path,
        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
        keep_original=False,
    )
    try:
        # push tarball and signed spec json to remote mirror
        web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
        web_util.push_to_url(
            signed_specfile_path if not options.unsigned else specfile_path,
            remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
            keep_original=False,
        )
    except Exception as e:
        raise PushToBuildCacheError(
            f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
        ) from e

    # push the key to the build cache's _pgp directory so it can be
    # imported
@@ -1296,8 +1200,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    if options.regenerate_index:
        generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

    return None


class NotInstalledError(spack.error.SpackError):
    """Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1254,6 @@ def specs_to_be_packaged(
    return [s for s in itertools.chain(roots, deps) if not s.external]


def push(spec: Spec, mirror_url: str, options: PushOptions):
    """Create and push binary package for a single spec to the specified
    mirror url.

    Args:
        spec: Spec to package and push
        mirror_url: Desired destination url for binary package
        options:

    Returns:
        True if package was pushed, False otherwise.

    """
    try:
        push_or_raise(spec, mirror_url, options)
    except NoOverwriteException as e:
        warnings.warn(str(e))
        return False

    return True


def try_verify(specfile_path):
    """Utility function to attempt to verify a local file. Assumes the
    file is a clearsigned signature file.
@@ -2120,6 +2000,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
        spec.package.windows_establish_runtime_linkage()
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec, spack.store.STORE.layout)

@@ -2158,21 +2039,17 @@ def try_direct_fetch(spec, mirrors=None):
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
            specfile_is_signed = True
        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
        except web_util.SpackWebError as e1:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
            except web_util.SpackWebError as e2:
                tty.debug(
                    "Did not find {0} on {1}".format(
                        specfile_name, buildcache_fetch_url_signed_json
                    ),
                    url_err,
                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                    e1,
                    level=2,
                )
                tty.debug(
                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
                    url_err_x,
                    level=2,
                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                )
                continue
        specfile_contents = codecs.getreader("utf-8")(fs).read()
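The try_direct_fetch hunk above narrows the broad (URLError, SpackWebError, HTTPError) handlers to web_util.SpackWebError while keeping the signed-then-unsigned fallback. The control flow, reduced to a self-contained shape (fetch, WebError, and the URLs are illustrative stand-ins, not Spack's API):

    class WebError(Exception):
        """Stand-in for web_util.SpackWebError in this illustration."""

    def fetch_with_fallback(fetch, signed_url, unsigned_url):
        """Try the signed specfile first; fall back once to the unsigned one."""
        try:
            return fetch(signed_url), True  # (payload, is_signed)
        except WebError:
            pass
        try:
            return fetch(unsigned_url), False
        except WebError:
            return None, False  # caller logs at debug level and skips this mirror
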
@@ -2257,6 +2134,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

    for mirror in mirror_collection.values():
        fetch_url = mirror.fetch_url
        # TODO: oci:// does not support signing.
        if fetch_url.startswith("oci://"):
            continue
        keys_url = url_util.join(
            fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
        )
@@ -2267,19 +2147,12 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
        try:
            _, _, json_file = web_util.read_from_url(keys_index)
            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
        except (URLError, web_util.SpackWebError) as url_err:
        except web_util.SpackWebError as url_err:
            if web_util.url_exists(keys_index):
                err_msg = [
                    "Unable to find public keys in {0},",
                    " caught exception attempting to read from {1}.",
                ]

                tty.error(
                    "".join(err_msg).format(
                        url_util.format(fetch_url), url_util.format(keys_index)
                    )
                    f"Unable to find public keys in {url_util.format(fetch_url)},"
                    f" caught exception attempting to read from {url_util.format(keys_index)}."
                )

                tty.debug(url_err)

            continue
@@ -2559,7 +2432,7 @@ def get_remote_hash(self):
        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
        try:
            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
        except urllib.error.URLError:
        except (TimeoutError, urllib.error.URLError):
            return None

        # Validate the hash
@@ -2581,7 +2454,7 @@ def conditional_fetch(self) -> FetchIndexResult:

        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
        except urllib.error.URLError as e:
        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

        try:
@@ -2622,10 +2495,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
    def conditional_fetch(self) -> FetchIndexResult:
        # Just do a conditional fetch immediately
        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
        headers = {
            "User-Agent": web_util.SPACK_USER_AGENT,
            "If-None-Match": '"{}"'.format(self.etag),
        }
        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

        try:
            response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2633,14 +2503,14 @@ def conditional_fetch(self) -> FetchIndexResult:
            if e.getcode() == 304:
                # Not modified; that means fresh.
                return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
        except urllib.error.URLError as e:
            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
            raise FetchIndexError(f"Could not fetch index {url}", e) from e
        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError(f"Could not fetch index {url}", e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
            raise FetchIndexError(f"Remote index {url} is invalid", e) from e

        headers = response.headers
        etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
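The conditional_fetch above is plain HTTP cache validation: send If-None-Match with the stored ETag and treat a 304 reply as "index still fresh". The same protocol with nothing but the standard library (the timeout and the quoting of the ETag are example choices):

    import urllib.error
    import urllib.request

    def fetch_if_changed(url: str, etag: str):
        """Return (body, new_etag), or (None, etag) when the server replies 304."""
        req = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
        try:
            with urllib.request.urlopen(req, timeout=10) as response:
                new_etag = (response.headers.get("Etag") or "").strip('"')
                return response.read(), new_etag
        except urllib.error.HTTPError as e:
            if e.code == 304:  # Not Modified: the cached copy is current
                return None, etag
            raise
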
@@ -2671,21 +2541,19 @@ def conditional_fetch(self) -> FetchIndexResult:
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
        except urllib.error.URLError as e:
            raise FetchIndexError(
                "Could not fetch manifest from {}".format(url_manifest), e
            ) from e
        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

        try:
            manifest = json.loads(response.read())
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

        # Get first blob hash, which should be the index.json
        try:
            index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

        # Fresh?
        if index_digest.digest == self.local_hash:
@@ -2706,3 +2574,96 @@ def conditional_fetch(self) -> FetchIndexResult:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid")

        return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)


class NoOverwriteException(spack.error.SpackError):
    """Raised when a file would be overwritten"""

    def __init__(self, file_path):
        super().__init__(f"Refusing to overwrite the following file: {file_path}")


class NoGpgException(spack.error.SpackError):
    """
    Raised when gpg2 is not in PATH
    """

    def __init__(self, msg):
        super().__init__(msg)


class NoKeyException(spack.error.SpackError):
    """
    Raised when gpg has no default key added.
    """

    def __init__(self, msg):
        super().__init__(msg)


class PickKeyException(spack.error.SpackError):
    """
    Raised when multiple keys can be used to sign.
    """

    def __init__(self, keys):
        err_msg = "Multiple keys available for signing\n%s\n" % keys
        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
        super().__init__(err_msg)


class NoVerifyException(spack.error.SpackError):
    """
    Raised if file fails signature verification.
    """

    pass


class NoChecksumException(spack.error.SpackError):
    """
    Raised if file fails checksum verification.
    """

    def __init__(self, path, size, contents, algorithm, expected, computed):
        super().__init__(
            f"{algorithm} checksum failed for {path}",
            f"Expected {expected} but got {computed}. "
            f"File size = {size} bytes. Contents = {contents!r}",
        )


class NewLayoutException(spack.error.SpackError):
    """
    Raised if directory layout is different from buildcache.
    """

    def __init__(self, msg):
        super().__init__(msg)


class InvalidMetadataFile(spack.error.SpackError):
    pass


class UnsignedPackageException(spack.error.SpackError):
    """
    Raised if installation of unsigned package is attempted without
    the use of ``--no-check-signature``.
    """


class ListMirrorSpecsError(spack.error.SpackError):
    """Raised when unable to retrieve list of specs from the mirror"""


class GenerateIndexError(spack.error.SpackError):
    """Raised when unable to generate key or package index for mirror"""


class CannotListKeys(GenerateIndexError):
    """Raised when unable to list keys when generating key index"""


class PushToBuildCacheError(spack.error.SpackError):
    """Raised when unable to push objects to binary mirror"""

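Note that the convenience wrapper push() is deleted by an earlier hunk, so callers now use push_or_raise and decide per call what a failure means. A sketch of a caller that reproduces the old boolean behavior, assuming the exception classes defined above (push_quietly is a hypothetical name, not a Spack function):

    def push_quietly(spec, mirror_url, options) -> bool:
        """Mimic the removed push(): return False instead of raising."""
        try:
            push_or_raise(spec, mirror_url, options)
        except NoOverwriteException as e:
            warnings.warn(str(e))  # tarball/spec.json already present, force=False
            return False
        except PushToBuildCacheError as e:
            tty.error(str(e))  # the upload itself failed
            return False
        return True
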
@@ -5,7 +5,13 @@
"""Function and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .core import (
    all_core_root_specs,
    ensure_clingo_importable_or_raise,
    ensure_core_dependencies,
    ensure_gpg_in_path_or_raise,
    ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

@@ -13,6 +19,8 @@
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_gpg_in_path_or_raise",
    "ensure_clingo_importable_or_raise",
    "ensure_patchelf_in_path_or_raise",
    "all_core_root_specs",
    "ensure_environment_dependencies",
@@ -54,10 +54,14 @@ def _try_import_from_store(
    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

    for candidate_spec in installed_specs:
        pkg = candidate_spec["python"].package
        # previously bootstrapped specs may not have a python-venv dependency.
        if candidate_spec.dependencies("python-venv"):
            python, *_ = candidate_spec.dependencies("python-venv")
        else:
            python, *_ = candidate_spec.dependencies("python")
        module_paths = [
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
            os.path.join(candidate_spec.prefix, python.package.purelib),
            os.path.join(candidate_spec.prefix, python.package.platlib),
        ]
        path_before = list(sys.path)
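The hunk above swaps a fixed candidate_spec["python"] lookup for a probe that prefers a python-venv dependency and falls back to python. The mechanism is just truthiness of an empty list; an isolated stand-in (dependencies is a placeholder for the Spec method of the same name):

    def pick_python_dep(dependencies):
        """Prefer a python-venv dependency; fall back to plain python."""
        # dependencies(name) returns a possibly-empty list of matching deps
        deps = dependencies("python-venv") or dependencies("python")
        python, *_ = deps  # raises ValueError if neither dependency exists
        return python
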
@@ -173,35 +173,14 @@ def _read_metadata(self, package_name: str) -> Any:
        return data

    def _install_by_hash(
        self,
        pkg_hash: str,
        pkg_sha256: str,
        index: List[spack.spec.Spec],
        bincache_platform: spack.platforms.Platform,
        self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
    ) -> None:
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null",
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family),
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
                spec_str = "/" + pkg_hash
                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
                for match in matches:
                    spack.binary_distribution.install_root_node(
                        match, unsigned=True, force=True, sha256=pkg_sha256
                    )
            query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
            for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
                spack.binary_distribution.install_root_node(
                    match, unsigned=True, force=True, sha256=pkg_sha256
                )

    def _install_and_test(
        self,
@@ -232,7 +211,7 @@ def _install_and_test(
                continue

            for _, pkg_hash, pkg_sha256 in item["binaries"]:
                self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

            info: ConfigDictionary = {}
            if test_fn(query_spec=abstract_spec, query_info=info):
@@ -291,10 +270,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
            # This is needed to help the old concretizer taking the `setuptools` dependency
            # only when bootstrapping from sources on Python 3.12
            if spec_for_current_python() == "python@3.12":
                concrete_spec.constrain("+force_setuptools")

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
@@ -559,6 +534,41 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
    )


def ensure_winsdk_external_or_raise() -> None:
    """Ensure the Windows SDK + WGL are available on system
    If both of these package are found, the Spack user or bootstrap
    configuration (depending on where Spack is running)
    will be updated to include all versions and variants detected.
    If either the WDK or WSDK are not found, this method will raise
    a RuntimeError.

    **NOTE:** This modifies the Spack config in the current scope,
    either user or environment depending on the calling context.
    This is different from all other current bootstrap dependency
    checks.
    """
    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
        return
    externals = spack.detection.by_path(["win-sdk", "wgl"])
    if not set(["win-sdk", "wgl"]) == externals.keys():
        missing_packages_lst = []
        if "wgl" not in externals:
            missing_packages_lst.append("wgl")
        if "win-sdk" not in externals:
            missing_packages_lst.append("win-sdk")
        missing_packages = " & ".join(missing_packages_lst)
        raise RuntimeError(
            f"Unable to find the {missing_packages}, please install these packages \
via the Visual Studio installer \
before proceeding with Spack or provide the path to a non standard install with \
'spack external find --path'"
        )
    # wgl/sdk are not required for bootstrapping Spack, but
    # are required for building anything non trivial
    # add to user config so they can be used by subsequent Spack ops
    spack.detection.update_configuration(externals, buildable=False)


def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
@@ -587,7 +597,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
        try:
            with open(metadata_yaml, encoding="utf-8") as stream:
                current.update(spack.util.spack_yaml.load(stream))
            list_of_sources.append(current)
        except OSError:
            pass
    return list_of_sources
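bootstrapping_sources now tolerates unreadable metadata files: an entry is appended only when its metadata file could be opened and parsed, and OSError is swallowed. The same defensive pattern in isolation (using PyYAML's safe_load as a stand-in for spack.util.spack_yaml.load):

    import yaml  # assumption: PyYAML, standing in for spack.util.spack_yaml

    def load_optional_yaml(path: str):
        """Return the parsed file, or None when it cannot be opened."""
        try:
            with open(path, encoding="utf-8") as stream:
                return yaml.safe_load(stream)
        except OSError:
            return None
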
@@ -3,13 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
import warnings
from typing import List
from typing import Iterable, List

import archspec.cpu

@@ -28,6 +26,16 @@
class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

    def __init__(self) -> None:
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

        # Remove python package roots created before python-venv was introduced
        for s in self.concrete_roots():
            if "python" in s.package.extendees and not s.dependencies("python-venv"):
                self.deconcretize(s)

    @classmethod
    def spack_dev_requirements(cls) -> List[str]:
        """Spack development requirements"""
@@ -59,31 +67,19 @@ def view_root(cls) -> pathlib.Path:
        return cls.environment_root().joinpath("view")

    @classmethod
    def pythonpaths(cls) -> List[str]:
        """Paths to be added to sys.path or PYTHONPATH"""
        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
        result = glob.glob(glob_expr)
        if not result:
            msg = f"Cannot find any Python path in {cls.view_root()}"
            warnings.warn(msg)
        return result

    @classmethod
    def bin_dirs(cls) -> List[pathlib.Path]:
    def bin_dir(cls) -> pathlib.Path:
        """Paths to be added to PATH"""
        return [cls.view_root().joinpath("bin")]
        return cls.view_root().joinpath("bin")

    def python_dirs(self) -> Iterable[pathlib.Path]:
        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}

    @classmethod
    def spack_yaml(cls) -> pathlib.Path:
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

    def __init__(self) -> None:
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

    def update_installations(self) -> None:
        """Update the installations of this environment."""
        log_enabled = tty.is_debug() or tty.is_verbose()
@@ -100,21 +96,13 @@ def update_installations(self) -> None:
            self.install_all()
            self.write(regenerate=True)

    def update_syspath_and_environ(self) -> None:
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
        the environment view.
        """
        # Do minimal modifications to sys.path and environment variables. In particular, pay
        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
        # the performance of the current interpreter
        sys.path.extend(self.pythonpaths())
        os.environ["PATH"] = os.pathsep.join(
            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
        )
        os.environ["PYTHONPATH"] = os.pathsep.join(
            os.environ.get("PYTHONPATH", "").split(os.pathsep)
            + [str(x) for x in self.pythonpaths()]
        )
    def load(self) -> None:
        """Update PATH and sys.path."""
        # Make executables available (shouldn't need PYTHONPATH)
        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"

        # Spack itself imports pytest
        sys.path.extend(str(p) for p in self.python_dirs())

    def _write_spack_yaml_file(self) -> None:
        tty.msg(
@@ -164,4 +152,4 @@ def ensure_environment_dependencies() -> None:
    _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
        env.load()
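load() replaces the glob-driven update_syspath_and_environ() with two targeted steps: prepend the view's bin directory to PATH and extend sys.path with the interpreter's site directories. A standalone sketch of the same idea (view_root and python_dirs are placeholders for the class methods above):

    import os
    import pathlib
    import sys

    def load_view(view_root: pathlib.Path, python_dirs) -> None:
        """Expose a view's executables and Python modules to this process."""
        bin_dir = view_root / "bin"
        os.environ["PATH"] = f"{bin_dir}{os.pathsep}{os.environ.get('PATH', '')}"
        # No PYTHONPATH changes: mutating sys.path suffices for this interpreter
        sys.path.extend(str(p) for p in python_dirs)
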
@@ -43,7 +43,8 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Tuple
from multiprocessing.connection import Connection
from typing import Callable, Dict, List, Optional, Set, Tuple

import llnl.util.tty as tty
from llnl.string import plural
@@ -51,14 +52,15 @@
from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error
import spack.main
import spack.package_base
import spack.paths
@@ -66,9 +68,11 @@
import spack.repo
import spack.schema.environment
import spack.spec
import spack.stage
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -78,7 +82,7 @@
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
    SYSTEM_DIRS,
    SYSTEM_DIR_CASE_ENTRY,
    EnvironmentModifications,
    env_flag,
    filter_system_paths,
@@ -101,9 +105,13 @@
# Spack's compiler wrappers.
#
SPACK_ENV_PATH = "SPACK_ENV_PATH"
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
SPACK_PREFIX = "SPACK_PREFIX"
@@ -416,7 +424,7 @@ def set_compiler_environment_variables(pkg, env):

    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

    compiler.setup_custom_environment(pkg, env)

@@ -472,12 +480,12 @@ def set_wrapper_variables(pkg, env):
    env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
        ccache = Executable("ccache")
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)
        # Enable ccache in the compiler wrapper
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
    else:
        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
        env.set("CCACHE_DISABLE", "1")

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -544,9 +552,26 @@ def update_compiler_args_for_dep(dep):
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
    # Spack managed directories include the stage, store and upstream stores. We extend this with
    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
    spack_managed_dirs: Set[str] = {
        spack.stage.get_stage_root(),
        spack.store.STORE.db.root,
        *(db.root for db in spack.store.STORE.db.upstream_dbs),
    }
    spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])

    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
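stable_partition (imported above from llnl.util.lang) splits a sequence by a predicate while preserving order, which is what lets the hunk route store-owned directories to the SPACK_STORE_* variables and everything else to the classic ones. A minimal reimplementation of that pattern, self-contained so it runs without Spack:

    import os
    from typing import Callable, Iterable, List, Tuple

    def stable_partition(
        items: Iterable[str], pred: Callable[[str], bool]
    ) -> Tuple[List[str], List[str]]:
        """Split items into (matching, non-matching), keeping input order."""
        yes: List[str] = []
        no: List[str] = []
        for item in items:
            (yes if pred(item) else no).append(item)
        return yes, no

    # Example with made-up paths: separate store-owned dirs from system dirs
    managed = {"/opt/spack/store", os.path.realpath("/opt/spack/store")}
    is_managed = lambda p: any(p.startswith(root) for root in managed)
    spack_dirs, system_dirs = stable_partition(
        ["/usr/lib", "/opt/spack/store/pkg/lib"], is_managed
    )
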
def set_package_py_globals(pkg, context: Context = Context.BUILD):
@@ -583,10 +608,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    link_dir = spack.paths.build_env_path
    module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
    module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
    module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
    module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
    pkg_compiler = None
    try:
        pkg_compiler = pkg.compiler
    except spack.compilers.NoCompilerForSpecError as e:
        tty.debug(f"cannot set 'spack_cc': {str(e)}")

    if pkg_compiler is not None:
        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
    else:
        module.spack_cc = None
        module.spack_cxx = None
        module.spack_f77 = None
        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
@@ -694,12 +731,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
    return compiler(*compiler_args, output=compiler_output)


def get_rpath_deps(pkg):
    """Return immediate or transitive RPATHs depending on the package."""
    if pkg.transitive_rpaths:
        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
    else:
        return pkg.spec.dependencies(deptype="link")
def _get_rpath_deps_from_spec(
    spec: spack.spec.Spec, transitive_rpaths: bool
) -> List[spack.spec.Spec]:
    if not transitive_rpaths:
        return spec.dependencies(deptype=dt.LINK)

    by_name: Dict[str, spack.spec.Spec] = {}

    for dep in spec.traverse(root=False, deptype=dt.LINK):
        lookup = by_name.get(dep.name)
        if lookup is None:
            by_name[dep.name] = dep
        elif lookup.version < dep.version:
            by_name[dep.name] = dep

    return list(by_name.values())


def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
    """Return immediate or transitive dependencies (depending on the package) that need to be
    rpath'ed. If a package occurs multiple times, the newest version is kept."""
    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
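_get_rpath_deps_from_spec deduplicates the link-dependency traversal by package name, keeping the highest version when a name occurs more than once. The same reduction over plain (name, version) pairs:

    from typing import Dict, List, Tuple

    def newest_per_name(deps: List[Tuple[str, int]]) -> List[Tuple[str, int]]:
        """Keep one entry per name, preferring the larger version."""
        by_name: Dict[str, Tuple[str, int]] = {}
        for name, version in deps:
            kept = by_name.get(name)
            if kept is None or kept[1] < version:
                by_name[name] = (name, version)
        return list(by_name.values())

    assert newest_per_name([("zlib", 1), ("zlib", 3), ("bzip2", 2)]) == [
        ("zlib", 3),
        ("bzip2", 2),
    ]
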
def get_rpaths(pkg):
@@ -789,7 +842,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        for mod in ["cray-mpich", "cray-libsci"]:
            module("unload", mod)

    if target.module_name:
    if target and target.module_name:
        load_module(target.module_name)

    load_external_modules(pkg)
@@ -1092,18 +1145,60 @@ def get_cmake_prefix_path(pkg):


def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
    function: Callable,
    kwargs: Dict,
    write_pipe: Connection,
    input_pipe: Optional[Connection],
    jsfd1: Optional[Connection],
    jsfd2: Optional[Connection],
):
    """Main entry point in the child process for Spack builds.

    ``_setup_pkg_and_run`` is called by the child process created in
    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).

    The child process is passed a ``write_pipe``, on which it's expected to send one of
    the following:

    * ``StopPhase``: error raised by a build process indicating it's stopping at a
      particular build phase.

    * ``BaseException``: any exception raised by a child build process, which will be
      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
      raised in the parent.

    * The return value of ``function()``, which can be anything (except an exception).
      This is returned to the caller.

    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
    does not close these file descriptors. Some ``multiprocessing`` backends will close
    them automatically in the child if they are not passed at process creation time.

    Arguments:
        serialized_pkg: Spack package install context object (serialized form of the
            package that we'll build in the child process).
        function: function to call in the child process; serialized_pkg is passed to
            this as the first argument.
        kwargs: additional keyword arguments to pass to ``function()``.
        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
            child *must* send a result (or an error) back to parent on.
        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
        jsfd1: gmake Jobserver file descriptor 1.
        jsfd2: gmake Jobserver file descriptor 2.

    """

    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to
        # open(os.devnull) to prevent our process and its parent from
        # simultaneously reading from the original stdin. But, we assume
        # that the parent process is not going to read from it till we
        # are done with the child, so we undo Python's precaution.
        if input_multiprocess_fd is not None:
            sys.stdin = os.fdopen(input_multiprocess_fd.fd)
        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
        # process and its parent from simultaneously reading from the original stdin. But, we
        # assume that the parent process is not going to read from it till we are done with the
        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
        if input_pipe is not None:
            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)

        pkg = serialized_pkg.restore()

@@ -1119,13 +1214,14 @@ def _setup_pkg_and_run(
        # Do not create a full ChildError from this, it's not an error
        # it's a control statement.
        write_pipe.send(e)
    except BaseException:
    except BaseException as e:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()

        # Need to unwind the traceback in the child because traceback
        # objects can't be sent to the parent.
        tb_string = traceback.format_exc()
        exc_type = type(e)
        tb = e.__traceback__
        tb_string = "".join(traceback.format_exception(exc_type, e, tb))

        # build up some context from the offending package so we can
        # show that, too.
@@ -1142,8 +1238,8 @@ def _setup_pkg_and_run(
        elif context == "test":
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

        error_msg = str(exc)
        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
        error_msg = str(e)
        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to {}. "
@@ -1153,7 +1249,7 @@ def _setup_pkg_and_run(
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
            error_msg = "{}\n\n{}".format(str(exc), error_msg)
            error_msg = "{}\n\n{}".format(str(e), error_msg)

        # make a pickleable exception to send to parent.
        msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1171,8 +1267,8 @@ def _setup_pkg_and_run(

    finally:
        write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_pipe is not None:
            input_pipe.close()


def start_build_process(pkg, function, kwargs):
@@ -1199,23 +1295,9 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

    This uses `multiprocessing.Process` to create the child process. The
    mechanism used to create the process differs on different operating
    systems and for different versions of Python. In some cases "fork"
    is used (i.e. the "fork" system call) and some cases it starts an
    entirely new Python interpreter process (in the docs this is referred
    to as the "spawn" start method). Breaking it down by OS:

    - Linux always uses fork.
    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
    - Windows always uses the "spawn" start method.

    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    input_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None

@@ -1224,14 +1306,13 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
            input_fd = Connection(os.dup(sys.stdin.fileno()))
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))
                jobserver_fd1 = Connection(int(m.group(1)))
                jobserver_fd2 = Connection(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
@@ -1240,7 +1321,7 @@ def child_fun():
                function,
                kwargs,
                write_pipe,
                input_multiprocess_fd,
                input_fd,
                jobserver_fd1,
                jobserver_fd2,
            ),
@@ -1260,8 +1341,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_fd is not None:
            input_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
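The MAKEFLAGS handling in start_build_process recognizes GNU make's jobserver, which advertises a read/write file-descriptor pair in the environment; Spack forwards those descriptors so the child build can coordinate parallelism with the parent make. Just the parsing step, runnable on its own (the MAKEFLAGS string is a made-up example):

    import re

    mflags = "--jobserver-auth=3,4 -j8"  # example of what make may export
    m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
    if m:
        read_fd, write_fd = int(m.group(1)), int(m.group(2))
        print(f"jobserver fds: read={read_fd}, write={write_fd}")
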
@@ -434,11 +434,6 @@ def _do_patch_libtool(self):
                r"crtendS\.o",
            ]:
                x.filter(regex=(rehead + o), repl="")
        elif self.pkg.compiler.name == "dpcpp":
            # Hack to filter out spurious predep_objects when building with Intel dpcpp
            # (see https://github.com/spack/spack/issues/32863):
            x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
            x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
        elif self.pkg.compiler.name == "nag":
            for tag in ["fc", "f77"]:
                marker = markers[tag]
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import os
import re
from typing import Tuple

import llnl.util.filesystem as fs
@@ -15,6 +16,12 @@
from .cmake import CMakeBuilder, CMakePackage


def spec_uses_toolchain(spec):
    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
    using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
    return using_toolchain


def cmake_cache_path(name, value, comment="", force=False):
    """Generate a string for a cmake cache variable"""
    force_str = " FORCE" if force else ""
@@ -213,7 +220,7 @@ def initconfig_mpi_entries(self):
        else:
            # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
            # vs the older versions which expect MPIEXEC
            if self.pkg.spec["cmake"].satisfies("@3.10:"):
            if spec["cmake"].satisfies("@3.10:"):
                entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
            else:
                entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -248,12 +255,17 @@ def initconfig_hardware_entries(self):
            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

            archs = spec.variants["cuda_arch"].value
            if archs[0] != "none":
                arch_str = ";".join(archs)
                entries.append(
                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
                )
            # CUDA_FLAGS
            cuda_flags = []

            if not spec.satisfies("cuda_arch=none"):
                cuda_archs = ";".join(spec.variants["cuda_arch"].value)
                entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))

            if spec_uses_toolchain(spec):
                cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))

            entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))

        if "+rocm" in spec:
            entries.append("#------------------{0}".format("-" * 30))
@@ -262,9 +274,6 @@ def initconfig_hardware_entries(self):

            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
            entries.append(
                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
            )
            llvm_bin = spec["llvm-amdgpu"].prefix.bin
            llvm_prefix = spec["llvm-amdgpu"].prefix
            # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -277,11 +286,9 @@ def initconfig_hardware_entries(self):
            archs = self.spec.variants["amdgpu_target"].value
            if archs[0] != "none":
                arch_str = ";".join(archs)
                entries.append(
                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
                )
                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
                entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

        return entries

@@ -16,7 +16,7 @@


class CargoPackage(spack.package_base.PackageBase):
    """Specialized class for packages built using a Makefiles."""
    """Specialized class for packages built using cargo."""

    #: This attribute is used in UI queries that need to know the build
    #: system base class
@@ -39,16 +39,11 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
    if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
    ``find_python_hints`` for context."""
    if not getattr(pkg, "find_python_hints", False):
    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
        "python", dt.BUILD | dt.LINK
    ):
        return
    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
    if len(pythons) != 1:
        return
    try:
        python_executable = pythons[0].package.command.path
    except RuntimeError:
        return

    python_executable = pkg.spec["python"].command.path
    args.extend(
        [
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
lib/spack/spack/build_systems/compiler.py (new file, 144 lines)
@@ -0,0 +1,144 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union

import llnl.util.tty as tty
from llnl.util.lang import classproperty

import spack.compiler
import spack.package_base

# Local "type" for type hints
Path = Union[str, pathlib.Path]


class CompilerPackage(spack.package_base.PackageBase):
    """A Package mixin for all common logic for packages that implement compilers"""

    # TODO: how do these play nicely with other tags
    tags: Sequence[str] = ["compiler"]

    #: Optional suffix regexes for searching for this type of compiler.
    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
    #: version suffix for gcc.
    compiler_suffixes: List[str] = [r"-.*"]

    #: Optional prefix regexes for searching for this compiler
    compiler_prefixes: List[str] = []

    #: Compiler argument(s) that produces version information
    #: If multiple arguments, the earlier arguments must produce errors when invalid
    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"

    #: Regex used to extract version from compiler's output
    compiler_version_regex: str = "(.*)"

    #: Static definition of languages supported by this class
    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
        assert set(self.supported_languages) <= set(self.compiler_languages), msg

    @property
    def supported_languages(self) -> Sequence[str]:
        """Dynamic definition of languages supported by this package"""
        return self.compiler_languages

    @classproperty
    def compiler_names(cls) -> Sequence[str]:
        """Construct list of compiler names from per-language names"""
        names = []
        for language in cls.compiler_languages:
            names.extend(getattr(cls, f"{language}_names"))
        return names

    @classproperty
    def executables(cls) -> Sequence[str]:
        """Construct executables for external detection from names, prefixes, and suffixes."""
        regexp_fmt = r"^({0}){1}({2})$"
        prefixes = [""] + cls.compiler_prefixes
        suffixes = [""] + cls.compiler_suffixes
        if sys.platform == "win32":
            ext = r"\.(?:exe|bat)"
            suffixes += [suf + ext for suf in suffixes]
        return [
            regexp_fmt.format(prefix, re.escape(name), suffix)
            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
        ]

    @classmethod
    def determine_version(cls, exe: Path):
        version_argument = cls.compiler_version_argument
        if isinstance(version_argument, str):
            version_argument = (version_argument,)

        for va in version_argument:
            try:
                output = spack.compiler.get_compiler_version_output(exe, va)
                match = re.search(cls.compiler_version_regex, output)
                if match:
                    return ".".join(match.groups())
            except spack.util.executable.ProcessError:
                pass
            except Exception as e:
                tty.debug(
                    f"[{__file__}] Cannot detect a valid version for the executable "
                    f"{str(exe)}, for package '{cls.name}': {e}"
                )

    @classmethod
    def compiler_bindir(cls, prefix: Path) -> Path:
        """Overridable method for the location of the compiler bindir within the prefix"""
        return os.path.join(prefix, "bin")

    @classmethod
    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
        """Compute the paths to compiler executables associated with this package

        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
        to include with each spec object."""
        # There are often at least two copies (not symlinks) of each compiler executable in the
        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
        # with canonical names if possible), we iterate over the executables in the reversed sorted
        # order:
        # First pass over languages identifies exes that are perfect matches for canonical names
        # Second pass checks for names with prefix/suffix
        # Second pass is sorted by language name length because longer named languages
        # e.g. cxx can often contain the names of shorter named languages
        # e.g. c (e.g. clang/clang++)
        paths = {}
        exes = sorted(exes, reverse=True)
        languages = {
            lang: getattr(cls, f"{lang}_names")
            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
        }
        for exe in exes:
            for lang, names in languages.items():
                if os.path.basename(exe) in names:
                    paths[lang] = exe
                    break
            else:
                for lang, names in languages.items():
                    if any(name in os.path.basename(exe) for name in names):
                        paths[lang] = exe
                        break

        return paths

    @classmethod
    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
        # path determination is separated so it can be reused in subclasses
        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
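The executables classproperty assembles one anchored regex per (prefix, name, suffix) combination, escaping the bare compiler name so characters like '+' stay literal. A standalone demo of that construction (the names and affixes are chosen for illustration):

    import itertools
    import re

    regexp_fmt = r"^({0}){1}({2})$"
    prefixes = [""]            # a target triple prefix could be added here
    names = ["gcc", "g++"]
    suffixes = ["", r"-.*"]    # bare name, or a '-mp-13'-style suffix

    patterns = [
        regexp_fmt.format(p, re.escape(n), s)
        for p, n, s in itertools.product(prefixes, names, suffixes)
    ]
    assert any(re.match(pat, "gcc-mp-13") for pat in patterns)
    assert any(re.match(pat, "g++") for pat in patterns)
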
@@ -21,7 +21,7 @@


class MakefilePackage(spack.package_base.PackageBase):
    """Specialized class for packages built using a Makefiles."""
    """Specialized class for packages built using Makefiles."""

    #: This attribute is used in UI queries that need to know the build
    #: system base class
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
        opts += self.nmake_install_args()
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
        opts.append(self.define("PREFIX", prefix))
        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
        with fs.working_dir(self.build_directory):
            inspect.getmodule(self.pkg).nmake(
                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -14,7 +14,7 @@
from llnl.util.link_tree import LinkTree

from spack.build_environment import dso_suffix
from spack.directives import conflicts, variant
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -26,10 +26,11 @@ class IntelOneApiPackage(Package):
    """Base class for Intel oneAPI packages."""

    homepage = "https://software.intel.com/oneapi"
    license("https://intel.ly/393CijO")

    # oneAPI license does not allow mirroring outside of the
    # organization (e.g. University/Company).
    redistribute_source = False
    redistribute(source=False, binary=False)

    for c in [
        "target=ppc64:",
@@ -138,16 +138,21 @@ def view_file_conflicts(self, view, merge_map):
|
||||
return conflicts
|
||||
|
||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
# Patch up shebangs to the python linked in the view only if python is built by Spack.
|
||||
if not self.extendee_spec or self.extendee_spec.external:
|
||||
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
||||
# view.
|
||||
if not self.extendee_spec:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
|
||||
if python.external:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
# We only patch shebangs in the bin directory.
|
||||
copied_files: Dict[Tuple[int, int], str] = {}  # File identifier -> source
delayed_links: List[Tuple[str, str]] = []  # List of symlinks from merge map

bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix

for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst):
continue

@@ -168,7 +173,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
copied_files[(s.st_dev, s.st_ino)] = dst
shutil.copy2(src, dst)
fs.filter_file(
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
)
else:
view.link(src, dst)

@@ -199,14 +204,13 @@ def remove_files_from_view(self, view, merge_map):
ignore_namespace = True

bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)

to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue

if global_view or not fs.path_contains_subdirectory(src, bin_dir):
if not fs.path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
else:
os.remove(dst)

@@ -362,6 +366,12 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
return f"https://pypi.org/simple/{name}/"
return None

@property
def python_spec(self):
"""Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python

@property
def headers(self) -> HeaderList:
"""Discover header files in platlib."""

@@ -371,8 +381,9 @@ def headers(self) -> HeaderList:

# Headers should only be in include or platlib, but no harm in checking purelib too
include = self.prefix.join(self.spec["python"].package.include).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)

headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list)

@@ -391,8 +402,9 @@ def libs(self) -> LibraryList:
name = self.spec.name[3:]

# Libraries should only be in platlib, but no harm in checking purelib too
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)

find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib])

@@ -504,6 +516,8 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:

def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
"""Install everything from build directory."""
pip = spec["python"].command
pip.add_default_arg("-m", "pip")

args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

@@ -519,14 +533,6 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
else:
args.append(".")

pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
with fs.working_dir(self.build_directory):
pip(*args)
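The `python_spec` property above relies on a list-fallback idiom: `Spec.dependencies()` returns a list, and an empty list is falsy, so `or` falls through to the plain `python` query. A minimal self-contained sketch of that idiom (function and argument names here are illustrative, not Spack's API):

def first_dep(venv_deps, python_deps):
    # An empty list is falsy, so `or` falls through to the second query,
    # mirroring: spec.dependencies("python-venv") or spec.dependencies("python")
    python, *_ = venv_deps or python_deps
    return python

print(first_dep([], ["python@3.11"]))               # -> python@3.11
print(first_dep(["python-venv"], ["python@3.11"]))  # -> python-venv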
@@ -75,9 +75,12 @@
# does not like its directory structure.
#

import os

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package_base import PackageBase
from spack.util.environment import EnvironmentModifications


class ROCmPackage(PackageBase):

@@ -154,6 +157,25 @@ def hip_flags(amdgpu_target):
archs = ",".join(amdgpu_target)
return "--amdgpu-target={0}".format(archs)

def asan_on(self, env: EnvironmentModifications):
llvm_path = self.spec["llvm-amdgpu"].prefix
env.set("CC", llvm_path + "/bin/clang")
env.set("CXX", llvm_path + "/bin/clang++")
env.set("ASAN_OPTIONS", "detect_leaks=0")

for root, _, files in os.walk(llvm_path):
if "libclang_rt.asan-x86_64.so" in files:
asan_lib_path = root
env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
if "rhel" in self.spec.os or "sles" in self.spec.os:
SET_DWARF_VERSION_4 = "-gdwarf-5"
else:
SET_DWARF_VERSION_4 = ""

env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")

# HIP version vs Architecture

# TODO: add a bunch of lines like:
File diff suppressed because it is too large
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
variants (bool): Show variants with specs
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
decorators (dict): dictionary mapping specs to decorators
header_callback (typing.Callable): called at start of arch/compiler groups
decorator (typing.Callable): function to call to decorate specs
all_headers (bool): show headers even when arch/compiler aren't defined
output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + ffmt + vfmt

transform = {"package": decorator, "fullpackage": decorator}

def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * "    "
string += s.cformat(format_string, transform=transform)
string += decorator(s, s.cformat(format_string))
return string

def format_list(specs):

@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
return [x for x in specs if x.dag_hash() in hashes]


def print_how_many_pkgs(specs, pkg_type=""):
def print_how_many_pkgs(specs, pkg_type="", suffix=""):
"""Given a list of specs, this will print a message about how many
specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


def spack_is_git_repo():
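A quick sketch of the new `suffix` parameter's effect on `print_how_many_pkgs` above; `plural` here is a simplified stand-in for `llnl.string.plural`, which pluralizes based on the count:

def plural(n, word):
    # Stand-in for llnl.string.plural: "1 package" vs "3 packages"
    return f"{n} {word}" if n == 1 else f"{n} {word}s"

def print_how_many_pkgs(specs, pkg_type="", suffix=""):
    print(plural(len(specs), pkg_type + " package") + suffix)

print_how_many_pkgs(["a", "b", "c"], "installed", suffix=" (not shown)")
# -> 3 installed packages (not shown)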
@@ -84,7 +84,7 @@ def externals(parser, args):
return

pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
_process_reports(reports)
@@ -13,7 +13,6 @@
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union

import llnl.util.tty as tty

@@ -54,6 +53,7 @@
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry,
upload_manifest_with_retry,
)

@@ -133,6 +133,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="when pushing to an OCI registry, tag an image containing all root specs and their "
"runtime dependencies",
)
push.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(push, ["specs", "jobs"])
push.set_defaults(func=push_fn)

@@ -275,23 +280,37 @@ def setup_parser(subparser: argparse.ArgumentParser):

# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
sync.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying copy manifest files"

sync_manifest_source = sync.add_argument_group(
"Manifest Source",
"Specify a list of build cache objects to sync using manifest file(s)."
'This option takes the place of the "source mirror" for synchronization'
'and optionally takes a "destination mirror" ',
)
sync.add_argument(
sync_manifest_source.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
)
sync_source_mirror = sync.add_argument_group(
"Named Source",
"Specify a single registered source mirror to synchronize from. This option requires"
"the specification of a destination mirror.",
)
sync_source_mirror.add_argument(
"src_mirror",
metavar="source mirror",
type=arguments.mirror_name_or_url,
nargs="?",
type=arguments.mirror_name_or_url,
help="source mirror name, path, or URL",
)

sync.add_argument(
"dest_mirror",
metavar="destination mirror",
type=arguments.mirror_name_or_url,
nargs="?",
type=arguments.mirror_name_or_url,
help="destination mirror name, path, or URL",
)

sync.set_defaults(func=sync_fn)

# Update buildcache index without copying any additional packages

@@ -353,6 +372,25 @@ def _make_pool() -> MaybePool:
return NoPool()


def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
for spec in specs:
if spec.package.redistribute_binary:
remaining_specs.append(spec)
else:
removed_specs.append(spec)
if removed_specs:
colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
tty.debug(
"The following specs will not be added to the binary cache"
" because they cannot be redistributed:\n"
f"{colified_output}\n"
"You can use `--private` to include them."
)
return remaining_specs


def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:

@@ -403,6 +441,8 @@ def push_fn(args):
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)

# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.

@@ -773,7 +813,7 @@ def _push_oci(

def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = 1
spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest,

@@ -816,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
tags = json.load(response)["tags"]
tags = list_tags(image_ref)

# Fetch all image config files in parallel
spec_dicts = pool.starmap(

@@ -1070,7 +1107,17 @@ def sync_fn(args):
requires an active environment in order to know which specs to sync
"""
if args.manifest_glob:
manifest_copy(glob.glob(args.manifest_glob))
# Passing the args.src_mirror here because it is not possible to
# have the destination be required when specifying a named source
# mirror and optional for the --manifest-glob argument. In the case
# of manifest glob sync, the source mirror positional argument is the
# destination mirror if it is specified. If there are two mirrors
# specified, the second is ignored and the first is the override
# destination.
if args.dest_mirror:
tty.warn(f"Ignoring unused argument: {args.dest_mirror.name}")

manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
return 0

if args.src_mirror is None or args.dest_mirror is None:

@@ -1121,7 +1168,7 @@ def sync_fn(args):
shutil.rmtree(tmpdir)


def manifest_copy(manifest_file_list):
def manifest_copy(manifest_file_list, dest_mirror=None):
"""Read manifest files containing information about specific specs to copy
from source to destination, remove duplicates since any binary package for
a given hash should be the same as any other, and copy all files specified

@@ -1135,10 +1182,17 @@ def manifest_copy(manifest_file_list):
# Last duplicate hash wins
deduped_manifest[spec_hash] = copy_list

build_cache_dir = bindist.build_cache_relative_path()
for spec_hash, copy_list in deduped_manifest.items():
for copy_file in copy_list:
tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
copy_buildcache_file(copy_file["src"], copy_file["dest"])
dest = copy_file["dest"]
if dest_mirror:
src_relative_path = os.path.join(
build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
)
dest = url_util.join(dest_mirror.push_url, src_relative_path)
tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
copy_buildcache_file(copy_file["src"], dest)


def update_index(mirror: spack.mirror.Mirror, update_keys=False):

@@ -1165,14 +1219,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)

bindist.generate_key_index(keys_url)
try:
bindist.generate_key_index(keys_url)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.
tty.warn(f"did not update the key index: {e}")


def update_index_fn(args):
"""update a buildcache index"""
update_index(args.mirror, update_keys=args.keys)
return update_index(args.mirror, update_keys=args.keys)


def buildcache(parser, args):
if args.func:
args.func(args)
return args.func(args)
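The destination rewrite in `manifest_copy` above re-roots everything after the build-cache directory in the source path onto the destination mirror's push URL. A hedged sketch with made-up paths, where a plain string join stands in for `url_util.join`:

import os

build_cache_dir = "build_cache"
src = "/old/mirror/build_cache/linux-x86_64/gcc-12/zlib.spack"  # illustrative
push_url = "s3://new-mirror"                                    # illustrative

# Everything after the last occurrence of the build-cache directory is kept
src_relative_path = os.path.join(build_cache_dir, src.rsplit(build_cache_dir, 1)[1].lstrip("/"))
dest = "/".join([push_url, src_relative_path])  # stand-in for url_util.join
print(dest)  # -> s3://new-mirror/build_cache/linux-x86_64/gcc-12/zlib.spack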
@@ -183,7 +183,7 @@ def checksum(parser, args):
print()

if args.add_to_package:
add_versions_to_package(pkg, version_lines)
add_versions_to_package(pkg, version_lines, args.batch)


def print_checksum_status(pkg: PackageBase, version_hashes: dict):

@@ -229,7 +229,7 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.")


def add_versions_to_package(pkg: PackageBase, version_lines: str):
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""
Add checksummed versions to a package's instructions and open a user's
editor so they may double check the work of the function.

@@ -282,5 +282,5 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")

if sys.stdout.isatty():
if sys.stdout.isatty() and not is_batch:
editor(filename)
@@ -14,6 +14,7 @@
import spack.binary_distribution as bindist
import spack.ci as spack_ci
import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev

@@ -30,14 +31,20 @@
level = "long"

SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100


def deindent(desc):
return desc.replace("    ", "")


def unicode_escape(path: str) -> str:
"""Returns transformed path with any unicode
characters replaced with their corresponding escapes"""
return path.encode("unicode-escape").decode("utf-8")


def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help="CI sub-commands")

@@ -549,75 +556,35 @@ def ci_rebuild(args):
# No hash match anywhere means we need to rebuild spec

# Start with spack arguments
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

config = cfg.get("config")
if not config["verify_ssl"]:
spack_cmd.append("-k")

install_args = []
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
install_args.append("--no-check-signature")

slash_hash = "/{}".format(job_spec.dag_hash())

# Arguments when installing dependencies from cache
deps_install_args = install_args
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

# Arguments when installing the root from sources
root_install_args = install_args + [
"--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
]
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--keep-stage", "--only=package"]

if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
root_install_args.extend(cdash_handler.args())
root_install_args.append(slash_hash)

# ["x", "y"] -> "'x' 'y'"
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

commands = [
# apparently there's a race when spack bootstraps? do it up front once
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
[
SPACK_COMMAND,
"-e",
env.path,
"env",
"depfile",
"-o",
"Makefile",
"--use-buildcache=package:never,dependencies:only",
slash_hash,  # limit to spec we're building
],
[
# --output-sync requires GNU make 4.x.
# Old make errors when you pass it a flag it doesn't recognize,
# but it doesn't error or warn when you set unrecognized flags in
# this variable.
"export",
"GNUMAKEFLAGS=--output-sync=recurse",
],
[
MAKE_COMMAND,
"SPACK={}".format(args_to_string(spack_cmd)),
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
),
],
spack_cmd + ["install"] + root_install_args,
[SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
spack_cmd + deps_install_args + [slash_hash],
spack_cmd + root_install_args + [slash_hash],
]

tty.debug("Installing {0} from source".format(job_spec.name))
install_exit_code = spack_ci.process_command("install", commands, repro_dir)

@@ -705,11 +672,9 @@ def ci_rebuild(args):
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
cdash_handler.copy_test_results(reports_dir, job_test_dir)

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
# outside of the pipeline environment.
if install_exit_code == 0:
# If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
# will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]

# TODO: Remove this block in Spack 0.23

@@ -721,13 +686,12 @@ def ci_rebuild(args):
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
if not result.success:
install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
(tty.msg if result.success else tty.error)(
f'{"Pushed" if result.success else "Failed to push"} '
f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
f"to {result.url}"
)

# If this is a develop pipeline, check if the spec that we just built is

@@ -748,22 +712,22 @@ def ci_rebuild(args):
tty.warn(msg.format(broken_spec_path, err))

else:
# If the install did not succeed, print out some instructions on how to reproduce this
# build failure outside of the pipeline environment.
tty.debug("spack install exited non-zero, will not create buildcache")

api_root_url = os.environ.get("CI_API_V4_URL")
ci_project_id = os.environ.get("CI_PROJECT_ID")
ci_job_id = os.environ.get("CI_JOB_ID")

repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
api_root_url, ci_project_id, ci_job_id
)

repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
# Control characters cause this to be printed in blue so it stands out
reproduce_msg = """
print(
f"""

\033[34mTo reproduce this build locally, run:

spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]

If this project does not have public pipelines, you will need to first:

@@ -771,12 +735,9 @@ def ci_rebuild(args):

... then follow the printed instructions.\033[0;0m

""".format(
repro_job_url
"""
)

print(reproduce_msg)

rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:
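What the new `unicode_escape` helper does is easiest to see in isolation: non-ASCII characters in a path are replaced by their escape sequences, which keeps the generated command line ASCII-safe (the example path is made up):

def unicode_escape(path: str) -> str:
    # Round-tripping through the unicode-escape codec turns e.g. "ü" into "\xfc"
    return path.encode("unicode-escape").decode("utf-8")

print(unicode_escape("/home/büild/env"))  # -> /home/b\xfcild/env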
@@ -11,7 +11,6 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify

@@ -867,9 +866,6 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
prepend_header(args, f)
formatter(args, f)

if args.update_completion:
fs.set_executable(args.update)

else:
prepend_header(args, sys.stdout)
formatter(args, sys.stdout)
@@ -563,12 +563,13 @@ def add_concretizer_args(subparser):
help="reuse installed packages/buildcaches when possible",
)
subgroup.add_argument(
"--fresh-roots",
"--reuse-deps",
action=ConfigSetAction,
dest="concretizer:reuse",
const="dependencies",
default=None,
help="reuse installed dependencies only",
help="concretize with fresh roots and reused dependencies",
)
subgroup.add_argument(
"--deprecated",

@@ -660,34 +661,32 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory.

# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m:
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirror.Mirror(m)

# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e


def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e


def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e
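The `raise ... from e` changes above chain the original `ValueError` onto the `ArgumentTypeError` as `__cause__`, so the root cause survives in tracebacks. A small self-contained illustration (the failing mirror lookup is simulated):

import argparse

def mirror_name(name):
    try:
        raise ValueError(f"no mirror named {name!r}")  # simulated lookup failure
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e

try:
    mirror_name("prod")
except argparse.ArgumentTypeError as err:
    print(type(err.__cause__).__name__)  # -> ValueError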
@@ -9,13 +9,14 @@
import shutil
import sys
import tempfile
from typing import Optional
from pathlib import Path
from typing import List, Optional

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
from llnl.util.tty.color import cescape, colorize

import spack.cmd
import spack.cmd.common

@@ -44,6 +45,7 @@
"deactivate",
"create",
["remove", "rm"],
["rename", "mv"],
["list", "ls"],
["status", "st"],
"loads",

@@ -59,14 +61,7 @@
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)

@@ -92,6 +87,9 @@ def env_create_setup_parser(subparser):
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
)


def env_create(args):

@@ -109,19 +107,32 @@ def env_create(args):
# the environment should not include a view.
with_view = None

include_concrete = None
if hasattr(args, "include_concrete"):
include_concrete = args.include_concrete

env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir,
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
with_view=with_view,
keep_relative=args.keep_relative,
include_concrete=include_concrete,
)

# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()


def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
def _env_create(
name_or_path: str,
*,
init_file: Optional[str] = None,
dir: bool = False,
with_view: Optional[str] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
):
"""Create a new environment, with an optional yaml description.

Arguments:

@@ -133,22 +144,31 @@ def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
include_concrete (list): list of the included concrete environments
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg("    spack env activate %s" % (name_or_path))
return env

env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg("    spack env activate %s" % env.path)
tty.msg(
colorize(
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
)
)
else:
env = ev.create_in_dir(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
return env


@@ -434,6 +454,12 @@ def env_remove_setup_parser(subparser):
"""remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
)


def env_remove(args):

@@ -442,14 +468,34 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
remove_envs = []
valid_envs = []
bad_envs = []
for env_name in args.rm_env:

for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
read_envs.append(env)
valid_envs.append(env)

if env_name in args.rm_env:
remove_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
bad_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)

# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
continue

if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)

if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)

@@ -458,7 +504,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")

for env in read_envs:
for env in remove_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")

@@ -472,11 +518,82 @@ def env_remove(args):
tty.msg(f"Successfully removed environment '{bad_env_name}'")


#
# env rename
#
def env_rename_setup_parser(subparser):
"""rename an existing environment"""
subparser.add_argument(
"mv_from", metavar="from", help="name (or path) of existing environment"
)
subparser.add_argument(
"mv_to", metavar="to", help="new name (or path) for existing environment"
)
subparser.add_argument(
"-d",
"--dir",
action="store_true",
help="the specified arguments correspond to directory paths",
)
subparser.add_argument(
"-f", "--force", action="store_true", help="allow overwriting of an existing environment"
)


def env_rename(args):
"""Rename an environment.

This renames a managed environment or moves an anonymous environment.
"""

# Directory option has been specified
if args.dir:
if not ev.is_env_dir(args.mv_from):
tty.die("The specified path does not correspond to a valid spack environment")
from_path = Path(args.mv_from)
if not args.force:
if ev.is_env_dir(args.mv_to):
tty.die(
"The new path corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
if Path(args.mv_to).exists():
tty.die("The new path already exists; specify the --force flag to overwrite it.")
to_path = Path(args.mv_to)

# Name option being used
elif ev.exists(args.mv_from):
from_path = ev.environment.environment_dir_from_name(args.mv_from)
if not args.force and ev.exists(args.mv_to):
tty.die(
"The new name corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
to_path = ev.environment.root(args.mv_to)

# Neither
else:
tty.die("The specified name does not correspond to a managed spack environment")

# Guard against renaming from or to an active environment
active_env = ev.active_environment()
if active_env:
from_env = ev.Environment(from_path)
if from_env.path == active_env.path:
tty.die("Cannot rename active environment")
if to_path == active_env.path:
tty.die(f"{args.mv_to} is an active environment")

shutil.rmtree(to_path, ignore_errors=True)
fs.rename(from_path, to_path)
tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")


#
# env list
#
def env_list_setup_parser(subparser):
"""list available environments"""
"""list managed environments"""


def env_list(args):
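The `dir=` expression in `env_create` above treats anything containing a path separator, or the literal `.` / `..`, as a directory environment rather than a managed environment name. A sketch of that heuristic (POSIX path separator assumed):

import os

def is_dir_env(env_name: str, dir_flag: bool = False) -> bool:
    # Mirrors: args.dir or os.path.sep in args.env_name or args.env_name in (".", "..")
    return dir_flag or os.path.sep in env_name or env_name in (".", "..")

print(is_dir_env("myenv"))    # -> False (managed environment name)
print(is_dir_env("./myenv"))  # -> True on POSIX (path separator present)
print(is_dir_env("."))        # -> True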
@@ -14,6 +14,7 @@
import spack.cmd as cmd
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

@@ -69,6 +70,12 @@ def setup_parser(subparser):

arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

subparser.add_argument(
"-r",
"--only-roots",
action="store_true",
help="don't show full list of installed specs in an environment",
)
subparser.add_argument(
"-c",
"--show-concretized",

@@ -140,6 +147,12 @@ def setup_parser(subparser):
subparser.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--install-tree",
action="store",
default="all",
help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
)

subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")

@@ -156,9 +169,9 @@ def query_arguments(args):
if (args.missing or args.only_missing) and not args.only_deprecated:
installed.append(InstallStatuses.MISSING)

known = any
predicate_fn = None
if args.unknown:
known = False
predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

explicit = any
if args.explicit:

@@ -166,7 +179,13 @@ def query_arguments(args):
if args.implicit:
explicit = False

q_args = {"installed": installed, "known": known, "explicit": explicit}
q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})
if install_tree in upstreams.keys():
install_tree = upstreams[install_tree]["install_tree"]
q_args["install_tree"] = install_tree

# Time window of installation
for attribute in ("start_date", "end_date"):

@@ -177,26 +196,22 @@ def query_arguments(args):
return q_args


def setup_env(env):
def make_env_decorator(env):
"""Create a function for decorating specs when in an environment."""

def strip_build(seq):
return set(s.copy(deps=("link", "run")) for s in seq)

added = set(strip_build(env.added_specs()))
roots = set(strip_build(env.roots()))
removed = set(strip_build(env.removed_specs()))
roots = set(env.roots())
removed = set(env.removed_specs())

def decorator(spec, fmt):
# add +/-/* to show added/removed/root specs
if any(spec.dag_hash() == r.dag_hash() for r in roots):
return color.colorize("@*{%s}" % fmt)
return color.colorize(f"@*{{{fmt}}}")
elif spec in removed:
return color.colorize("@K{%s}" % fmt)
return color.colorize(f"@K{{{fmt}}}")
else:
return "%s" % fmt
return fmt

return decorator, added, roots, removed
return decorator


def display_env(env, args, decorator, results):

@@ -211,10 +226,54 @@ def display_env(env, args, decorator, results):
"""
tty.msg("In environment %s" % env.name)

if not env.user_specs:
tty.msg("No root specs")
else:
tty.msg("Root specs")
num_roots = len(env.user_specs) or "No"
tty.msg(f"{num_roots} root specs")

concrete_specs = {
root: concrete_root
for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
}

def root_decorator(spec, string):
"""Decorate root specs with their install status if needed"""
concrete = concrete_specs.get(spec)
if concrete:
status = color.colorize(concrete.install_status().value)
hash = concrete.dag_hash()
else:
status = color.colorize(spack.spec.InstallStatus.absent.value)
hash = "-" * 32

# TODO: status has two extra spaces on the end of it, but fixing this and other spec
# TODO: space format idiosyncrasies is complicated. Fix this eventually
status = status[:-2]

if args.long or args.very_long:
hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
return f"{status} {hash} {string}"
else:
return f"{status} {string}"

with spack.store.STORE.db.read_transaction():
cmd.display_specs(
env.user_specs,
args,
# these are overrides of CLI args
paths=False,
long=False,
very_long=False,
# these enforce details in the root specs to show what the user asked for
namespaces=True,
show_flags=True,
show_full_compiler=True,
decorator=root_decorator,
variants=True,
)

print()

if env.included_concrete_envs:
tty.msg("Included specs")

# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes

@@ -224,10 +283,10 @@ def display_env(env, args, decorator, results):
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
env.included_user_specs,
root_args,
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespaces=True,
namespace=True,
show_flags=True,
show_full_compiler=True,
variants=True,

@@ -242,7 +301,7 @@ def display_env(env, args, decorator, results):
# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results:
if results and not args.only_roots:
tty.msg("Installed packages")


@@ -251,9 +310,10 @@ def find(parser, args):
results = args.specs(**q_args)

env = ev.active_environment()
decorator = lambda s, f: f
if env:
decorator, _, roots, _ = setup_env(env)
if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")

decorator = make_env_decorator(env) if env else lambda s, f: f

# use groups by default except with format.
if args.groups is None:

@@ -280,9 +340,12 @@ def find(parser, args):
if env:
display_env(env, args, decorator, results)

cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = " (not shown)"
if not args.only_roots:
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""

# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type)
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
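The old boolean `known` flag is replaced by a free-form `predicate_fn` above. A stand-in query sketches the idea; the record shape here is made up purely for illustration:

from types import SimpleNamespace

def query(records, predicate_fn=None):
    # predicate_fn=None keeps everything, mirroring the new default
    return [r for r in records if predicate_fn is None or predicate_fn(r)]

records = [SimpleNamespace(spec=SimpleNamespace(name="zlib")),
           SimpleNamespace(spec=SimpleNamespace(name="mystery"))]
known_names = {"zlib"}
unknown = query(records, predicate_fn=lambda x: x.spec.name not in known_names)
print([r.spec.name for r in unknown])  # -> ['mystery']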
@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):
return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")


def _fmt_when(when, indent):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
def _fmt_when(when: "spack.spec.Spec", indent: int):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")


def _fmt_variant_description(variant, width, indent):

@@ -441,7 +441,7 @@ def get_url(version):
return "No URL"

url = get_url(preferred) if pkg.has_code else ""
line = version("    {0}".format(pad(preferred))) + color.cescape(url)
line = version("    {0}".format(pad(preferred))) + color.cescape(str(url))
color.cwrite(line)

print()

@@ -464,7 +464,7 @@ def get_url(version):
continue

for v, url in vers:
line = version("    {0}".format(pad(v))) + color.cescape(url)
line = version("    {0}".format(pad(v))) + color.cescape(str(url))
color.cprint(line)


@@ -475,10 +475,7 @@ def print_virtuals(pkg, args):
color.cprint(section_title("Virtual Packages: "))
if pkg.provided:
for when, specs in reversed(sorted(pkg.provided.items())):
line = "    %s provides %s" % (
when.colorized(),
", ".join(s.colorized() for s in specs),
)
line = "    %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
print(line)

else:

@@ -497,7 +494,9 @@ def print_licenses(pkg, args):
pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license("    {0}".format(pad(license_identifier))) + color.cescape(when_spec)
line = license("    {0}".format(pad(license_identifier))) + color.cescape(
str(when_spec)
)
color.cprint(line)
@@ -61,7 +61,6 @@ def install_kwargs_from_args(args):
"dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
"dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
"include_build_deps": args.include_build_deps,
"explicit": True,  # Use true as a default for install command
"stop_at": args.until,
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),

@@ -420,10 +419,9 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
finally:
# TODO: this is doing way too much to trigger
# views and modules to be generated.
with env.write_transaction():
env.write(regenerate=True)
if env.views:
with env.write_transaction():
env.write(regenerate=True)


def concrete_specs_from_cli(args, install_kwargs):

@@ -474,6 +472,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

installs = [(s.package, install_kwargs) for s in concrete_specs]
builder = PackageInstaller(installs)
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
builder = PackageInstaller(installs, install_kwargs)
builder.install()
@@ -5,8 +5,6 @@

import sys

import llnl.util.tty as tty

import spack.cmd
import spack.cmd.find
import spack.environment as ev

@@ -70,16 +68,6 @@ def setup_parser(subparser):
help="load the first match if multiple packages match the spec",
)

subparser.add_argument(
"--only",
default="package,dependencies",
dest="things_to_load",
choices=["package", "dependencies"],
help="select whether to load the package and its dependencies\n\n"
"the default is to load the package and all dependencies. alternatively, "
"one can decide to load only the package or only the dependencies",
)

subparser.add_argument(
"--list",
action="store_true",

@@ -110,11 +98,6 @@ def load(parser, args):
)
return 1

if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)

with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
for spec in specs:
@@ -101,8 +101,9 @@ def do_mark(specs, explicit):
specs (list): list of specs to be marked
explicit (bool): whether to mark specs as explicitly installed
"""
for spec in specs:
spack.store.STORE.db.update_explicit(spec, explicit)
with spack.store.STORE.db.write_transaction():
for spec in specs:
spack.store.STORE.db.mark(spec, "explicit", explicit)


def mark_specs(args, specs):
@@ -71,6 +71,11 @@ def setup_parser(subparser):
help="the number of versions to fetch for each spec, choose 'all' to"
" retrieve all versions of each package",
)
create_parser.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(create_parser, ["specs"])
arguments.add_concretizer_args(create_parser)

@@ -108,6 +113,11 @@ def setup_parser(subparser):
"and source use `--type binary --type source` (default)"
),
)
add_parser.add_argument(
"--autopush",
action="store_true",
help=("set mirror to push automatically after installation"),
)
add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
add_parser_signed.add_argument(
"--unsigned",

@@ -175,6 +185,21 @@ def setup_parser(subparser):
),
)
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
set_parser_autopush.add_argument(
"--autopush",
help="set mirror to push automatically after installation",
action="store_true",
default=None,
dest="autopush",
)
set_parser_autopush.add_argument(
"--no-autopush",
help="set mirror to not push automatically after installation",
action="store_false",
default=None,
dest="autopush",
)
set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
set_parser_unsigned.add_argument(
"--unsigned",

@@ -218,6 +243,7 @@ def mirror_add(args):
or args.type
or args.oci_username
or args.oci_password
or args.autopush
or args.signed is not None
):
connection = {"url": args.url}

@@ -234,6 +260,8 @@ def mirror_add(args):
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
if args.autopush:
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirror.Mirror(connection, name=args.name)

@@ -270,6 +298,8 @@ def _configure_mirror(args):
changes["access_pair"] = [args.oci_username, args.oci_password]
if getattr(args, "signed", None) is not None:
changes["signed"] = args.signed
if getattr(args, "autopush", None) is not None:
changes["autopush"] = args.autopush

# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr

@@ -334,7 +364,6 @@ def concrete_specs_from_user(args):
specs = filter_externals(specs)
specs = list(set(specs))
specs.sort(key=lambda s: (s.name, s.version))
specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
return specs


@@ -379,36 +408,50 @@ def concrete_specs_from_cli_or_file(args):
return specs


def not_excluded_fn(args):
"""Return a predicate that evaluates to True if a spec was not explicitly
excluded by the user.
"""
exclude_specs = []
if args.exclude_file:
exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
class IncludeFilter:
def __init__(self, args):
self.exclude_specs = []
if args.exclude_file:
self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
self.private = args.private

def not_excluded(x):
return not any(x.satisfies(y) for y in exclude_specs)
def __call__(self, x):
return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])

return not_excluded
def _not_license_excluded(self, x):
"""True if the spec is for a private mirror, or as long as the
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(
"Skip adding {0} to mirror: the package.py file"
" indicates that a public mirror should not contain"
" it.".format(x.name)
)
return False

def _not_cmdline_excluded(self, x):
"""True if a spec was not explicitly excluded by the user."""
return not any(x.satisfies(y) for y in self.exclude_specs)


def concrete_specs_from_environment(selection_fn):
def concrete_specs_from_environment():
env = ev.active_environment()
assert env, "an active environment is required"
mirror_specs = env.all_specs()
mirror_specs = filter_externals(mirror_specs)
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs


def all_specs_with_all_versions(selection_fn):
def all_specs_with_all_versions():
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs


@@ -429,12 +472,6 @@ def versions_per_spec(args):
return num_versions


def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)


def process_mirror_stats(present, mirrored, error):
p, m, e = len(present), len(mirrored), len(error)
tty.msg(

@@ -480,30 +517,28 @@ def mirror_create(args):
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()

mirror_specs, mirror_fn = _specs_and_action(args)
mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)


def _specs_and_action(args):
include_fn = IncludeFilter(args)

if args.all and not ev.active_environment():
create_mirror_for_all_specs(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return
mirror_specs = all_specs_with_all_versions()
mirror_fn = create_mirror_for_all_specs
elif args.all and ev.active_environment():
mirror_specs = concrete_specs_from_environment()
mirror_fn = create_mirror_for_individual_specs
else:
mirror_specs = concrete_specs_from_user(args)
mirror_fn = create_mirror_for_individual_specs

if args.all and ev.active_environment():
create_mirror_for_all_specs_inside_environment(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return

mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
)
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
return mirror_specs, mirror_fn


def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)

@@ -515,11 +550,10 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
process_mirror_stats(*mirror_stats.stats())


def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
)
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)


def mirror_destroy(args):
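`IncludeFilter` above is consumed through `lang.stable_partition`, which splits specs into kept and excluded lists while preserving input order. A minimal stand-in for that helper, with a simple name-based predicate in place of the real spec matching:

def stable_partition(items, predicate_fn):
    # Stand-in for llnl.util.lang.stable_partition: order-preserving split
    true_items, false_items = [], []
    for item in items:
        (true_items if predicate_fn(item) else false_items).append(item)
    return true_items, false_items

excluded = {"cuda"}  # illustrative stand-in for parsed --exclude-specs
keep, drop = stable_partition(["zlib", "cuda", "cmake"], lambda s: s not in excluded)
print(keep, drop)  # -> ['zlib', 'cmake'] ['cuda']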
@@ -377,7 +377,10 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
"refresh": {
"installed": True,
"predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
}
}
query_args = constraint_qualifiers.get(args.subparser_name, {})
@@ -116,39 +116,38 @@ def ipython_interpreter(args):

 def python_interpreter(args):
     """A python interpreter is the default interpreter"""
-    # Fake a main python shell by setting __name__ to __main__.
-    console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
-    if "PYTHONSTARTUP" in os.environ:
-        startup_file = os.environ["PYTHONSTARTUP"]
-        if os.path.isfile(startup_file):
-            with open(startup_file) as startup:
-                console.runsource(startup.read(), startup_file, "exec")
-
-    if args.python_command:
-        propagate_exceptions_from(console)
-        console.runsource(args.python_command)
-    elif args.python_args:
-        propagate_exceptions_from(console)
+    if args.python_args and not args.python_command:
         sys.argv = args.python_args
-        with open(args.python_args[0]) as file:
-            console.runsource(file.read(), args.python_args[0], "exec")
+        runpy.run_path(args.python_args[0], run_name="__main__")
     else:
-        # Provides readline support, allowing user to use arrow keys
-        console.push("import readline")
-        # Provide tabcompletion
-        console.push("from rlcompleter import Completer")
-        console.push("readline.set_completer(Completer(locals()).complete)")
-        console.push('readline.parse_and_bind("tab: complete")')
+        # Fake a main python shell by setting __name__ to __main__.
+        console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
+        if "PYTHONSTARTUP" in os.environ:
+            startup_file = os.environ["PYTHONSTARTUP"]
+            if os.path.isfile(startup_file):
+                with open(startup_file) as startup:
+                    console.runsource(startup.read(), startup_file, "exec")
+        if args.python_command:
+            propagate_exceptions_from(console)
+            console.runsource(args.python_command)
+        else:
+            # Provides readline support, allowing user to use arrow keys
+            console.push("import readline")
+            # Provide tabcompletion
+            console.push("from rlcompleter import Completer")
+            console.push("readline.set_completer(Completer(locals()).complete)")
+            console.push('readline.parse_and_bind("tab: complete")')

-    console.interact(
-        "Spack version %s\nPython %s, %s %s"
-        % (
-            spack.spack_version,
-            platform.python_version(),
-            platform.system(),
-            platform.machine(),
-        )
-    )
+            console.interact(
+                "Spack version %s\nPython %s, %s %s"
+                % (
+                    spack.spack_version,
+                    platform.python_version(),
+                    platform.system(),
+                    platform.machine(),
+                )
+            )


 def propagate_exceptions_from(console):
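`runpy.run_path` executes the script the same way `python script.py` does, so `if __name__ == "__main__":` guards fire, where the old `console.runsource` approach ran the file's text inside an interactive console. A standard-library-only illustration (the script name is invented):

    import runpy
    import sys

    # As if the user ran: spack python my_script.py arg1 arg2
    sys.argv = ["my_script.py", "arg1", "arg2"]

    # Runs the file with __name__ set to "__main__" and returns its globals.
    script_globals = runpy.run_path(sys.argv[0], run_name="__main__")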
@@ -91,7 +91,6 @@ def setup_parser(subparser):

-
 def _process_result(result, show, required_format, kwargs):
     result.raise_if_unsat()
     opt, _, _ = min(result.answers)
     if ("opt" in show) and (not required_format):
         tty.msg("Best of %d considered solutions." % result.nmodels)
@@ -23,7 +23,7 @@

 # tutorial configuration parameters
-tutorial_branch = "releases/v0.21"
+tutorial_branch = "releases/v0.22"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -34,6 +34,13 @@ def setup_parser(subparser):
         default=False,
         help="show full pytest help, with advanced options",
     )
+    subparser.add_argument(
+        "-n",
+        "--numprocesses",
+        type=int,
+        default=1,
+        help="run tests in parallel up to this wide, default 1 for sequential",
+    )

     # extra spack arguments to list tests
     list_group = subparser.add_argument_group("listing tests")
@@ -207,8 +214,6 @@ def unit_test(parser, args, unknown_args):

     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
-    # Note: skip on windows here because for the moment,
-    # clingo is wholly unsupported from bootstrap
     with spack.bootstrap.ensure_bootstrap_configuration():
         spack.bootstrap.ensure_core_dependencies()
     if pytest is None:
@@ -229,6 +234,16 @@ def unit_test(parser, args, unknown_args):
     if args.extension:
         pytest_root = spack.extensions.load_extension(args.extension)

+    if args.numprocesses is not None and args.numprocesses > 1:
+        pytest_args.extend(
+            [
+                "--dist",
+                "loadfile",
+                "--tx",
+                f"{args.numprocesses}*popen//python=spack-tmpconfig spack python",
+            ]
+        )
+
     # pytest.ini lives in the root of the spack repository.
     with llnl.util.filesystem.working_dir(pytest_root):
         if args.list:
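The new `-n/--numprocesses` flag maps onto pytest-xdist: `--dist loadfile` keeps every test from a given file on the same worker, and the `--tx N*popen//python=...` spec spawns that many workers through the given interpreter command. A sketch of how the argument list grows for `spack unit-test -n 4` (the starting pytest_args value is invented):

    numprocesses = 4  # from spack unit-test -n 4
    pytest_args = ["--verbose"]
    if numprocesses > 1:
        pytest_args.extend(
            [
                "--dist",
                "loadfile",
                "--tx",
                f"{numprocesses}*popen//python=spack-tmpconfig spack python",
            ]
        )
    # pytest_args now carries the xdist options on to pytest.main()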
@@ -38,10 +38,10 @@

 import spack.cmd
 import spack.environment as ev
+import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
-from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

 description = "project packages to a compact naming scheme on the filesystem"
@@ -193,17 +193,13 @@ def view(parser, args):
     ordered_projections = {}

-    # What method are we using for this view
-    if args.action in actions_link:
-        link_fn = view_func_parser(args.action)
-    else:
-        link_fn = view_func_parser("symlink")
-
-    view = YamlFilesystemView(
+    link_type = args.action if args.action in actions_link else "symlink"
+    view = fsv.YamlFilesystemView(
         path,
         spack.store.STORE.layout,
         projections=ordered_projections,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
-        link=link_fn,
+        link_type=link_type,
         verbose=args.verbose,
     )

@@ -20,8 +20,10 @@

 import spack.compilers
 import spack.error
 import spack.schema.environment
+import spack.spec
 import spack.util.executable
+import spack.util.libc
 import spack.util.module_cmd
 import spack.version
 from spack.util.environment import filter_system_paths
@@ -107,7 +109,6 @@ def _parse_link_paths(string):
     """
     lib_search_paths = False
     raw_link_dirs = []
-    tty.debug("parsing implicit link info")
     for line in string.splitlines():
         if lib_search_paths:
             if line.startswith("\t"):
@@ -122,7 +123,7 @@ def _parse_link_paths(string):
                 continue
         if _LINKER_LINE_IGNORE.match(line):
             continue
-        tty.debug("linker line: %s" % line)
+        tty.debug(f"implicit link dirs: link line: {line}")

         next_arg = False
         for arg in line.split():
@@ -138,15 +139,12 @@ def _parse_link_paths(string):
             link_dir_arg = _LINK_DIR_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("linkdir: %s" % link_dir)
                 raw_link_dirs.append(link_dir)

             link_dir_arg = _LIBPATH_ARG.match(arg)
             if link_dir_arg:
                 link_dir = link_dir_arg.group("dir")
-                tty.debug("libpath: %s", link_dir)
                 raw_link_dirs.append(link_dir)
-    tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))

     implicit_link_dirs = list()
     visited = set()
@@ -156,7 +154,7 @@ def _parse_link_paths(string):
             implicit_link_dirs.append(normalized_path)
             visited.add(normalized_path)

-    tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
+    tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
     return implicit_link_dirs


@@ -292,7 +290,7 @@ def __init__(
         operating_system,
         target,
         paths,
-        modules=None,
+        modules: Optional[List[str]] = None,
         alias=None,
         environment=None,
         extra_rpaths=None,
@@ -417,17 +415,35 @@ def real_version(self):
             self._real_version = self.version
         return self._real_version

-    def implicit_rpaths(self):
+    def implicit_rpaths(self) -> List[str]:
         if self.enable_implicit_rpaths is False:
             return []

-        # Put CXX first since it has the most linking issues
-        # And because it has flags that affect linking
-        link_dirs = self._get_compiler_link_paths()
+        output = self.compiler_verbose_output
+
+        if not output:
+            return []
+
+        link_dirs = _parse_non_system_link_dirs(output)

         all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
         return list(paths_containing_libs(link_dirs, all_required_libs))

+    @property
+    def default_libc(self) -> Optional["spack.spec.Spec"]:
+        """Determine libc targeted by the compiler from link line"""
+        output = self.compiler_verbose_output
+
+        if not output:
+            return None
+
+        dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
+
+        if not dynamic_linker:
+            return None
+
+        return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
+
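`default_libc` leans on the fact that verbose link lines from gcc- and clang-family drivers name the dynamic linker with a `-dynamic-linker` argument. A hedged sketch of the kind of extraction `spack.util.libc.parse_dynamic_linker` could perform; the regex and function body here are illustrative, not Spack's actual implementation:

    import re
    from typing import Optional

    def parse_dynamic_linker(output: str) -> Optional[str]:
        """Return the -dynamic-linker argument from verbose link output, if present."""
        for line in reversed(output.splitlines()):
            match = re.search(r"-dynamic-linker\s+(\S+)", line)
            if match:
                return match.group(1)
        return None

    # A gcc -v link line typically contains something like
    #   ... collect2 ... -dynamic-linker /lib64/ld-linux-x86-64.so.2 ...
    # so the sketch would return "/lib64/ld-linux-x86-64.so.2".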
     @property
     def required_libs(self):
         """For executables created with this compiler, the compiler libraries
@@ -436,17 +452,17 @@ def required_libs(self):
         # By default every compiler returns the empty list
         return []

-    def _get_compiler_link_paths(self):
+    @property
+    def compiler_verbose_output(self) -> Optional[str]:
+        """Verbose output from compiling a dummy C source file. Output is cached."""
+        if not hasattr(self, "_compile_c_source_output"):
+            self._compile_c_source_output = self._compile_dummy_c_source()
+        return self._compile_c_source_output
+
+    def _compile_dummy_c_source(self) -> Optional[str]:
         cc = self.cc if self.cc else self.cxx
         if not cc or not self.verbose_flag:
             # Cannot determine implicit link paths without a compiler / verbose flag
-            return []
-
-        # What flag types apply to first_compiler, in what order
-        if cc == self.cc:
-            flags = ["cflags", "cppflags", "ldflags"]
-        else:
-            flags = ["cxxflags", "cppflags", "ldflags"]
+            return None

         try:
             tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
@@ -458,20 +474,19 @@ def _get_compiler_link_paths(self):
                 "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
             )
             cc_exe = spack.util.executable.Executable(cc)
-            for flag_type in flags:
+            for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
                 cc_exe.add_default_arg(*self.flags.get(flag_type, []))

             with self.compiler_environment():
-                output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
-            return _parse_non_system_link_dirs(output)
+                return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
         except spack.util.executable.ProcessError as pe:
             tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
-            return []
+            return None
         finally:
             shutil.rmtree(tmpdir, ignore_errors=True)

     @property
-    def verbose_flag(self):
+    def verbose_flag(self) -> Optional[str]:
         """
         This property should be overridden in the compiler subclass if a
         verbose flag is available.
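`compiler_verbose_output` memoizes one dummy compilation per Compiler instance, and because the sentinel is `hasattr` rather than a truthiness check, a failed compilation (None) is cached too. The same behavior can be had from `functools.cached_property`, which also caches None; one reason to keep the explicit check is compatibility with interpreters predating Python 3.8, where `cached_property` was added:

    import functools

    class Example:
        @functools.cached_property
        def verbose_output(self):
            print("expensive dummy compilation, runs once")
            return None  # even a None result is stored in the instance dict

    e = Example()
    e.verbose_output  # prints once
    e.verbose_output  # second access is served from the cache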
@@ -669,8 +684,8 @@ def __str__(self):

     @contextlib.contextmanager
     def compiler_environment(self):
-        # yield immediately if no modules
-        if not self.modules:
+        # Avoid modifying os.environ if possible.
+        if not self.modules and not self.environment:
             yield
             return

@@ -687,13 +702,9 @@ def compiler_environment(self):
                 spack.util.module_cmd.load_module(module)

             # apply other compiler environment changes
-            env = spack.util.environment.EnvironmentModifications()
-            env.extend(spack.schema.environment.parse(self.environment))
-            env.apply_modifications()
+            spack.schema.environment.parse(self.environment).apply_modifications()

             yield
-        except BaseException:
-            raise
         finally:
             # Restore environment regardless of whether inner code succeeded
             os.environ.clear()
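The tightened `compiler_environment` skips environment mutation entirely when there is nothing to apply, and the `finally` clause restores `os.environ` no matter how the body exits, which makes the removed `except BaseException: raise` redundant. A self-contained sketch of that snapshot-and-restore shape:

    import contextlib
    import os

    @contextlib.contextmanager
    def preserved_environment():
        """Snapshot os.environ, let the body mutate it, then restore it."""
        backup = os.environ.copy()
        try:
            yield
        finally:
            os.environ.clear()
            os.environ.update(backup)

    with preserved_environment():
        os.environ["CC"] = "/opt/toolchain/bin/cc"  # visible only inside the block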
@@ -10,6 +10,7 @@
 import itertools
 import multiprocessing.pool
 import os
+import warnings
 from typing import Dict, List, Optional, Tuple

 import archspec.cpu
@@ -109,27 +110,33 @@ def _to_dict(compiler):
     return {"compiler": d}


-def get_compiler_config(scope=None, init_config=False):
+def get_compiler_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = False,
+) -> List[Dict]:
     """Return the compiler configuration for the specified architecture."""
-    config = spack.config.get("compilers", scope=scope) or []
+    config = configuration.get("compilers", scope=scope) or []
     if config or not init_config:
         return config

-    merged_config = spack.config.get("compilers")
+    merged_config = configuration.get("compilers")
     if merged_config:
         # Config is empty for this scope
         # Do not init config because there is a non-empty scope
         return config

-    _init_compiler_config(scope=scope)
-    config = spack.config.get("compilers", scope=scope)
+    _init_compiler_config(configuration, scope=scope)
+    config = configuration.get("compilers", scope=scope)
     return config


-def get_compiler_config_from_packages(scope=None):
+def get_compiler_config_from_packages(
+    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
+) -> List[Dict]:
     """Return the compiler configuration from packages.yaml"""
-    config = spack.config.get("packages", scope=scope)
+    config = configuration.get("packages", scope=scope)
     if not config:
         return []

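Threading an explicit `Configuration` through these functions replaces lookups of global state, so tests can hand in a scratch configuration instead of monkeypatching `spack.config`. A generic sketch of the same dependency-injection move, with the configuration type reduced to the one method the sketch needs (a stand-in, not Spack's class):

    from typing import Any, Dict, List, Optional

    class StubConfiguration:
        """Minimal stand-in exposing only .get(section, scope=...)."""

        def __init__(self, data: Dict[str, List[Any]]):
            self._data = data

        def get(self, section: str, scope: Optional[str] = None) -> List[Any]:
            return self._data.get(section, [])  # scope ignored in this stub

    def get_compiler_config(
        configuration: StubConfiguration, *, scope: Optional[str] = None
    ) -> List[Any]:
        # No global lookup: the caller decides which configuration is consulted.
        return configuration.get("compilers", scope=scope) or []

    assert get_compiler_config(StubConfiguration({"compilers": []})) == []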
@@ -157,43 +164,66 @@ def _compiler_config_from_package_config(config):


 def _compiler_config_from_external(config):
+    extra_attributes_key = "extra_attributes"
+    compilers_key = "compilers"
+    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
+
+    # Allow `@x.y.z` instead of `@=x.y.z`
     spec = spack.spec.parse_with_version_concrete(config["spec"])
-    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
     compiler_spec = spack.spec.CompilerSpec(
         package_name_to_compiler_name.get(spec.name, spec.name), spec.version
     )

-    extra_attributes = config.get("extra_attributes", {})
-    prefix = config.get("prefix", None)
+    err_header = f"The external spec '{spec}' cannot be used as a compiler"

-    compiler_class = class_for_compiler_name(compiler_spec.name)
-    paths = extra_attributes.get("paths", {})
-    compiler_langs = ["cc", "cxx", "fc", "f77"]
-    for lang in compiler_langs:
-        if paths.setdefault(lang, None):
-            continue
-
-        if not prefix:
-            continue
-
-        # Check for files that satisfy the naming scheme for this compiler
-        bindir = os.path.join(prefix, "bin")
-        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
-            if regex.match(f):
-                paths[lang] = os.path.join(bindir, f)
-
-    if all(v is None for v in paths.values()):
+    # If extra_attributes is not there I might not want to use this entry as a compiler,
+    # therefore just leave a debug message, but don't be loud with a warning.
+    if extra_attributes_key not in config:
+        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
         return None
+    extra_attributes = config[extra_attributes_key]

+    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
+    if compilers_key not in extra_attributes:
+        warnings.warn(
+            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
+        )
+        return None
+    attribute_compilers = extra_attributes[compilers_key]
+
+    if c_key not in attribute_compilers:
+        warnings.warn(
+            f"{err_header}: missing the C compiler path under "
+            f"'{extra_attributes_key}:{compilers_key}'"
+        )
+        return None
+    c_compiler = attribute_compilers[c_key]
+
+    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
+    if cxx_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
+
+    if fortran_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
+
+    # compilers format has cc/fc/f77, externals format has "c/fortran"
+    paths = {
+        "cc": c_compiler,
+        "cxx": attribute_compilers.get(cxx_key, None),
+        "fc": attribute_compilers.get(fortran_key, None),
+        "f77": attribute_compilers.get(fortran_key, None),
+    }
+
     if not spec.architecture:
         host_platform = spack.platforms.host()
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.target
+        target = spec.architecture.target
         if not target:
-            host_platform = spack.platforms.host()
-            target = host_platform.target("default_target").microarchitecture
+            target = spack.platforms.host().target("default_target")
+        target = target.microarchitecture

         operating_system = spec.os
         if not operating_system:
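After this rewrite an external entry is only usable as a compiler when its paths are spelled out under `extra_attributes:compilers`; a missing C compiler aborts with a warning, while missing C++ or Fortran entries only leave debug traces. A sketch of the dict shape the function now consumes, written as the Python structure a packages.yaml entry would parse into (the spec and paths are invented):

    external_entry = {
        "spec": "llvm@17.0.1",
        "prefix": "/usr",  # hypothetical install prefix
        "extra_attributes": {
            "compilers": {
                "c": "/usr/bin/clang",        # mandatory: without it, no compiler entry
                "cxx": "/usr/bin/clang++",    # optional: absence is only a debug note
                "fortran": "/usr/bin/flang",  # optional: reused for both fc and f77
            }
        },
    }

    attribute_compilers = external_entry["extra_attributes"]["compilers"]
    paths = {
        "cc": attribute_compilers["c"],
        "cxx": attribute_compilers.get("cxx", None),
        "fc": attribute_compilers.get("fortran", None),
        "f77": attribute_compilers.get("fortran", None),
    }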
@@ -216,13 +246,15 @@ def _compiler_config_from_external(config):
     return compiler_entry


-def _init_compiler_config(*, scope):
+def _init_compiler_config(
+    configuration: "spack.config.Configuration", *, scope: Optional[str]
+) -> None:
     """Compiler search used when Spack has no compilers."""
     compilers = find_compilers()
     compilers_dict = []
     for compiler in compilers:
         compilers_dict.append(_to_dict(compiler))
-    spack.config.set("compilers", compilers_dict, scope=scope)
+    configuration.set("compilers", compilers_dict, scope=scope)


 def compiler_config_files():
@@ -233,7 +265,7 @@ def compiler_config_files():
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
             config_files.append(config.get_config_filename(name, "compilers"))
-        compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
+        compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
         if compiler_config_from_packages:
             config_files.append(config.get_config_filename(name, "packages"))
     return config_files
@@ -246,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
         compilers: a list of Compiler objects.
         scope: configuration scope to modify.
     """
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     for compiler in compilers:
         if not compiler.cc:
             tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -295,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
        True if one or more compiler entries were actually removed, False otherwise
     """
     assert scope is not None, "a specific scope is needed when calling this function"
-    compiler_config = get_compiler_config(scope, init_config=False)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     filtered_compiler_config = [
         compiler_entry
         for compiler_entry in compiler_config
@@ -310,21 +346,28 @@ def _remove_compiler_from_scope(compiler_spec, scope):
     # We need to preserve the YAML type for comments, hence we are copying the
     # items in the list that has just been retrieved
     compiler_config[:] = filtered_compiler_config
-    spack.config.set("compilers", compiler_config, scope=scope)
+    spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
     return True


-def all_compilers_config(scope=None, init_config=True):
+def all_compilers_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = True,
+) -> List["spack.compiler.Compiler"]:
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    from_packages_yaml = get_compiler_config_from_packages(scope)
+    from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
     if from_packages_yaml:
         init_config = False
-    from_compilers_yaml = get_compiler_config(scope, init_config)
+    from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)

     result = from_compilers_yaml + from_packages_yaml
-    key = lambda c: _compiler_from_config_entry(c["compiler"])
+    # Dedupe entries by the compiler they represent
+    # If the entry is invalid, treat it as unique for deduplication
+    key = lambda c: _compiler_from_config_entry(c["compiler"]) or id(c)
     return list(llnl.util.lang.dedupe(result, key=key))


@@ -332,7 +375,7 @@ def all_compiler_specs(scope=None, init_config=True):
     # Return compiler specs from the merged config.
     return [
         spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
-        for s in all_compilers_config(scope, init_config)
+        for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
     ]


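Deduplication keeps the first entry for each distinct compiler, and the `or id(c)` fallback guarantees an entry whose parse fails can never collapse onto another one. An order-preserving dedupe-by-key sketch equivalent in spirit to the `llnl.util.lang.dedupe` call above (generic code, not the library's implementation):

    from typing import Callable, Hashable, Iterable, Iterator, TypeVar

    T = TypeVar("T")

    def dedupe(items: Iterable[T], key: Callable[[T], Hashable]) -> Iterator[T]:
        """Yield items whose key has not been seen before, preserving order."""
        seen = set()
        for item in items:
            k = key(item)
            if k not in seen:
                seen.add(k)
                yield item

    assert list(dedupe([3, 1, 3, 2, 1], key=lambda x: x)) == [3, 1, 2]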
@@ -492,11 +535,20 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


 def all_compilers(scope=None, init_config=True):
-    config = all_compilers_config(scope, init_config=init_config)
-    compilers = list()
-    for items in config:
+    return all_compilers_from(
+        configuration=spack.config.CONFIG, scope=scope, init_config=init_config
+    )
+
+
+def all_compilers_from(configuration, scope=None, init_config=True):
+    compilers = []
+    for items in all_compilers_config(
+        configuration=configuration, scope=scope, init_config=init_config
+    ):
         items = items["compiler"]
-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)  # can be None in error case
+        if compiler:
+            compilers.append(compiler)
     return compilers


@@ -507,7 +559,7 @@ def compilers_for_spec(
     """This gets all compilers that satisfy the supplied CompilerSpec.
     Returns an empty list if none are found.
     """
-    config = all_compilers_config(scope, init_config)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

     matches = set(find(compiler_spec, scope, init_config))
     compilers = []
@@ -517,7 +569,7 @@ def compilers_for_spec(


 def compilers_for_arch(arch_spec, scope=None):
-    config = all_compilers_config(scope)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope)
     return list(get_compilers(config, arch_spec=arch_spec))


@@ -603,7 +655,10 @@ def _compiler_from_config_entry(items):
     compiler = _compiler_cache.get(config_id, None)

     if compiler is None:
-        compiler = compiler_from_dict(items)
+        try:
+            compiler = compiler_from_dict(items)
+        except UnknownCompilerError as e:
+            warnings.warn(e.message)
         _compiler_cache[config_id] = compiler

     return compiler
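With the try/except in place, an entry naming an unknown compiler produces one warning and a cached None, so later lookups stay quiet and callers (like `get_compilers` below) must tolerate None. A small sketch of that negative-caching pattern under invented names:

    import warnings
    from typing import Dict, Optional

    _cache: Dict[str, Optional[str]] = {}

    def parse_entry(config_id: str) -> str:
        """Hypothetical parser that rejects unknown entries."""
        if not config_id.startswith("known"):
            raise ValueError(f"unknown compiler entry: {config_id}")
        return config_id.upper()

    def cached_lookup(config_id: str) -> Optional[str]:
        if config_id in _cache:
            return _cache[config_id]  # hits include remembered failures (None)
        try:
            value = parse_entry(config_id)
        except ValueError as e:
            warnings.warn(str(e))  # warn once, then remember the failure
            value = None
        _cache[config_id] = value
        return value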
@@ -656,7 +711,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
                 raise ValueError(msg)
             continue

-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)
+        if compiler:
+            compilers.append(compiler)

     return compilers

@@ -933,10 +990,11 @@ def _default_make_compilers(cmp_id, paths):
     make_mixed_toolchain(flat_compilers)

     # Finally, create the compiler list
-    compilers = []
+    compilers: List["spack.compiler.Compiler"] = []
     for compiler_id, _, compiler in flat_compilers:
         make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
-        compilers.extend(make_compilers(compiler_id, compiler))
+        candidates = make_compilers(compiler_id, compiler)
+        compilers.extend(x for x in candidates if x.cc is not None)

     return compilers

@@ -38,10 +38,10 @@ class Clang(Compiler):
     cxx_names = ["clang++"]

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ["flang"]
+    f77_names = ["flang-new", "flang"]

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ["flang"]
+    fc_names = ["flang-new", "flang"]

     version_argument = "--version"

@@ -171,10 +171,11 @@ def extract_version_from_output(cls, output):

         match = re.search(
             # Normal clang compiler versions are left as-is
-            r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
             # Don't include hyphenated patch numbers in the version
             # (see https://github.com/spack/spack/pull/14365 for details)
-            r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
+            r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
+            r"(?:clang|flang-new) version ([^ )\n]+)",
             output,
         )
         if match:
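The widened alternation accepts flang-new banners alongside clang ones while preserving the three version shapes (svn builds, hyphenated patch levels, plain versions). A quick check with the standard re module (the version strings are invented for the demo):

    import re

    CLANG_VERSION = re.compile(
        r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
        r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
        r"(?:clang|flang-new) version ([^ )\n]+)"
    )

    m = CLANG_VERSION.search("flang-new version 18.1.3")
    assert m and m.group(3) == "18.1.3"  # plain version: third alternative fires

    m = CLANG_VERSION.search("clang version 16.0.6-1ubuntu1")
    assert m and m.group(2) == "16.0.6"  # hyphenated suffix stripped: second alternative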
@@ -1,34 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import os
-
-import spack.compilers.oneapi
-
-
-class Dpcpp(spack.compilers.oneapi.Oneapi):
-    """This is the same as the oneAPI compiler but uses dpcpp instead of
-    icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
-    CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
-    detect it as dpcpp.
-
-    Ideally we could switch out icpx for dpcpp where needed in the oneAPI
-    compiler definition, but two things are needed for that: (a) a way to
-    tell the compiler that it should be using dpcpp and (b) a way to
-    customize the link_paths
-
-    See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
-    """
-
-    # Subclasses use possible names of C++ compiler
-    cxx_names = ["dpcpp"]
-
-    # Named wrapper links within build_env_path
-    link_paths = {
-        "cc": os.path.join("oneapi", "icx"),
-        "cxx": os.path.join("oneapi", "dpcpp"),
-        "f77": os.path.join("oneapi", "ifx"),
-        "fc": os.path.join("oneapi", "ifx"),
-    }