Compare commits
649 Commits
docs/no-se...e4s-24.05
The commit table's avatar, author, and date columns did not survive extraction; the 649 commit SHA1s, in the order listed, are:

c7c9474e89 7c1a309453 78b6fa96e5 1b315a9ede 82df0e549d f5591f9068 98c08d277d facca4e2c8
764029bcd1 44cb4eca93 39888d4df6 f68ea49e54 78b5e4cdfa 26515b8871 74640987c7 d6154645c7
faed43704b 6fba31ce34 112cead00b 9e2558bd56 019058226f ac0040f67d 38f341f12d 26ad22743f
46c2b8a565 5cbb59f2b8 f29fa1cfdf c69951d6e1 f406f27d9c 36ea208e12 17e0774189 3162c2459d
7cad6c62a3 eb2ddf6fa2 2bc2902fed b362362291 32bb5c7523 a2b76c68a0 62132919e1 b06929f6df
0f33de157b 03a074ebe7 4d12b6a4fd 26bb15e1fb 1bf92c7881 eefe0b2eec de6c6f0cd9 309d3aa1ec
feff11f914 de3b324983 747cd374df 8b3ac40436 28e9be443c 1381bede80 6502785908 53257408a3
28d02dff60 9d60b42a97 9ff5a30574 9a6c013365 9f62a3e819 e380e9a0ab 8415ea9ada 6960766e0c
0c2ca8c841 273960fdbb 0cd2a1102c e40676e901 4ddb07e94f 50585d55c5 5d6b5f3f6f 2351c19489
08d49361f0 c3c63e5ca4 e72d4075bd f9f97bf22b 8033455d5f 50a5a6fea4 0de8a0e3f3 0a26e74cc8
9dfd91efbb 1a7baadbff afcfd56ae5 7eb2e704b6 564b4fa263 0a941b43ca 35ff24ddea 7019e4e3cb
cb16b8a047 381acb3726 d87ea0b256 1a757e7f70 704e2c53a8 478d8a668c 7903f9fcfd 670d3d3fdc
e8aab6b31c 1ce408ecc5 dc81a2dcdb b10f51f020 4f4e3f5607 00fb80e766 057603cad8 5b8b6e492d
763279cd61 e4237b9153 d288658cf0 2c22ae0576 fc3fc94689 b5013c1372 e220674c4d 7f13518225
96a13a97e6 6d244b3f67 6bc66db141 acfb2b9270 d92a2c31fb e32561aff6 4b0479159f 03bfd36926
4d30c8dce4 49d4104f22 07fb83b493 263007ba81 3b6e99381f a30af1ac54 294742ab7b 6391559fb6
d4d4f813a9 4667163dc4 439f105285 f65b1fd7b6 d23e06c27e b76e9a887b 55ffd439ce d8a7b88e7b
aaa1bb1d98 0d94b8044b 5a52780f7c dd0a8452ee c467bba73e d680a0cb99 efadee26ef 2077b3a006
8e0c659b51 863ab5a597 db4e76ab27 6728a46a84 5a09459dd5 7e14ff806a 7e88cf795c 1536e3d422
1fe8e63481 dfca2c285e 2686f778fa 925e9c73b1 aba447e885 1113de0dad 4110225166 24c839c837
42c6a6b189 b0ea1c6f24 735102eb2b 2e3cdb349b 05c8030119 bbcd4224fa 4c0cdb99b3 f22d009c6d
c5a3e36ad0 1c76ba1c3e b969f739bd 4788c4774c 34de028dbc a69254fd79 af5f205759 77f9100a59
386bb71392 0676d6457f 0b80e36867 4c9816f10c fb6741cf85 3f2fa256fc d5c8864942 b3cef1072d
e8ae9a403c 1a8ef161c8 d3913938bc 4179880fe6 125dd0368e fd68f8916c 93e6f5fa4e 54acda3f11
663e20fcc4 6428132ebb 171958cf09 0d0f7ab030 35f8b43a54 6f7eb3750c 2121eb31ba c68d739825
c468697b35 c4094cf051 9ff9ca61e6 826e0c0405 1b86a842ea 558a28bf52 411576e1fa cab4f92960
c6c13f6782 cf11fab5ad 1d8b35c840 5dc46a976d 05f5596cdd 6942c7f35b 18f0ac0f94 d9196ee3f8
ef0bb6fe6b 3fed320013 1aa77e695d 3a0efeecf1 5ffb5657c9 2b3e7fd10a cb315e18f0 10c637aca0
fb4e1cad45 3054b71e2e 47163f7435 e322a8382f 53fb4795ca 4517c7fa9b efaed17f91 2c17cd365d
dfe537f688 be0002b460 743ee5f3de b6caf0156f ec00ffc244 f020256b9f 04377e39e0 ba2703fea6
92b1c8f763 2b29ecd9b6 5b43bf1b58 37d9770e02 0e016ba6f5 7afa949da1 b81d7d0aac e78484f501
6fd43b4e75 14edb55288 f062f1c5b3 7756c8f4fc 69c8a9e4ba 47c0736952 8b89287084 8bd6283b52
179e4f3ad1 e97787691b 5932ee901c 3bdebeba3c d390ee1902 4f9fe6f9bf df6d6d9b5c e57d33b29f
85c6d6dbab 5f9228746e 9f2451ddff a05eb11b7b ae2d0ff1cd 7e906ced75 647e89f6bc 3239c29fb0
abced0e87d 300fc2ee42 13c4258e54 f29cb7f953 826b8f25c5 ebaeea7820 f76eb993aa 0b2c370a83
6a9ee480bf cc80d52b62 b9c7d3b89b c1be6a5483 42550208c3 be231face6 89ac747a76 5d8f36d667
6c3fed351f b9cbd15674 b8f633246a a2f3e98ab9 acffe37313 249e5415e8 e2a942d07e 32deca2a4c
e4c64865f1 1175f37577 faa183331f bbac33871c 6d4dd33c46 579bad05a8 27a8eb0f68 4cd993070f
4c55c6a268 a4a27fb1e4 66345e7185 8f76f1b0d8 4cab6f3af5 0d4665583b 5d0ef9e4f4 e145baf619
6c912b30a2 f4da453f6b 7e9caed8c2 69509a6d9a 0841050d20 321ffd732b 22922323e3 0b5b192c18
1275c57d88 29a39ac6a0 ae9c86a930 83199a981d ed40c3210e be96460ab2 95caf55fe7 960af24270
899bef2aa8 f0f092d9f1 6eaac2270d a9f3f6c007 08a04ebd46 d8e642ecb7 669ed69d8e 7ebb21a0da
93ffa9ba5d e5fdb90496 303a0b3653 9f07544bde 9b046a39a8 0c9a53ba3a 1fd4353289 fcb8ed6409
2f11862832 bff11ce8e7 218693431c e036cd9ef6 cd5bef6780 159e9a20d1 99bb288db7 99744a766b
ddd8be51a0 bba66b1063 1c3c21d9c7 cbe9b3d01c 0abf5ba43c 9ab3c1332b b6425da50f 937a4dbf69
cd779ee54d 7ddcb13325 7666046ce3 8e89e61402 d0dbfaa5d6 26f562b5a7 2967804da1 c3eaf4d6cf
397334a4be 434836be81 7b9b976f40 4746e8a048 69c684fef9 2314aeb884 d33e10a695 7668a0889a
d7a74bde9f fedf8128ae f70af2cc57 50562e6a0e 4ac51b2127 81c9e346dc 73e16a7881 af8868fa47
cfd4e356f8 fc87dcad4c 65472159c7 d1f9d8f06d 67ac9c46a8 aa39465188 09810a5e7c 446c0f2325
c4ce51c9be 1f63a764ac 384e198304 2303332415 0eb1957999 de1f9593c6 65fa71c1b4 9802649716
8d9d721f07 ecef72c471 485b6e2170 ba02c6b70f 7028669d50 2f0a73f7ef 7cb0dbf77a ac8800ffc7
eb11fa7d18 4d8381a775 de5e20fc21 c33af49ed5 3addda6c4d 33f6f55d6b 41d20d3731 dde8fa5561
588a94bc8c 06392f2c01 f16e29559e ea96403157 b659eac453 ab590cc03a 1a007a842b 9756354998
3984dd750c d5c1e16e43 56ace9a087 6e0bab1706 193386f6ac 755131fcdf 9a71733adb cd919d51ea
12adf66d07 c02f58da8f 9662d181a0 282df7aecc b4c0e6f03b 4cd8488139 69a052841c a3f39890c2
02d126ce2b 339a63370f fef6aed627 3445da807e 429c3598af 3d8136493a 8cd160db85 a7dd756b34
53be280681 5ab10d57be 96061d2c00 e78d20dc84 6d2341c109 968ad02473 b93882804f f58ebd4fbb
6f7f9528e5 59c7ff8683 4495e0341d ba39924046 751c3fef86 102811adb9 8f56eb620f ec517b40e9
22cb3815fe f549354f78 dc212d0e59 8f14acb139 c38ef72b06 7d67d9ece4 2c30962c74 cc28334049
dbdf5bacc4 531f01f0b9 794593b478 afcf0d2e39 29ee861366 b1a984ef02 cc545d8c9a 49ff816fb0
8c33841567 21b50fbbe3 2a8e503a04 4b695d4722 2fa816184e 25f622e809 7506acabe7 3a828358cb
94a1d1414a 0f080b38f4 f1ec4859c8 63baba0308 aeec861544 e54d4678f9 187b8adb4f d6fd96f024
e3b6d2c3c7 1e9c46296c 48183b37be 9a3d248348 03e22adb5b 5f5fc78236 e12a8a69c7 001af62585
f5e89df6f2 ce75adada6 24d37df1a2 a9d294c532 9dcaa56db4 98162aa2e1 3934df622c dbf5d79557
97e29e501d 258c651a8f 43ca6da346 9786bd932b c72619d4db 8ecae17c46 1e47ccb83a d6421a69eb
000dff2fd4 1e413477dd 8955e63a68 bf14b424bb 14209a86a6 b7d9900764 bc155e7b90 65f9ba345f
ca49bc5652 b84b85a7e0 016cdba16f 4806e6549f c14b277150 919025d9f3 52f57c90eb ee1fa3e50c
772928241b 7440bb4c36 c464866deb 799a8a5090 c218ee50e9 8ff7a20320 e3fe6bc0f7 c6fcb1068f
54ac3e72ed 274fbebc4c d40eb19918 31de670bd2 6c0961549b c090bc5ebe bca4d37d76 9b484d2eea
a57b0e1e2d e3cb3b29d9 ac48ecd375 0bb20d34db 971fda5c33 dcc4423a9d 82c380b563 8bcf6a31ae
ddd88e266a 08c597d83e bf5340755d f8e70a0c96 7e468aefd5 e685d04f84 9d962f55b0 00d3066b97
5ca0dcecb2 fa8fb7903b f35ff441f2 9ea9ee05c8 24ddc49c1b 2f4266161c 7bcb0fff7d cd332c6370
6daf9677f3 cb6450977d bf62ac0769 0223fe746b 12fba13441 0c44f5a140 f4853790c5 9ed2e396f4
3ee6fc937e c9b6cc9a58 58b394bcec 4d89eeca9b bfc71e9dae f061dcda74 cc460894fd 5e09660e87
5a8efb3b14 99002027c4 a247879be3 7b46993fed dd59f4ba34 18ab14e659 28eb5e1bf6 c658ddbfa3
12963c894f 61fa12508f daf6acef6e d30621e787 dd4b365608 157d47fc5a 13daa1b692 f923e650f9
1a1bbb8af2 594fcc3c80 76ec19b26e 00baaf868e 3b06347f65 5b9e207db2 d6fd9017c4 913d79238e
250038fa9b 26c553fce7 e24c242fb7 ca14ce2629 44f443946c 6e6bc89bda 8714ea6652 df92f0a7d4
d24b91157c 1a0f77388c 34571d4ad6 a574f40732 d4ffe244af e08e66ad89 0543710258 5d994e48d5
d1fa23e9c6 f1db8b7871 c5cca54c27 a9c1648db8 3bd911377e fcb2f7d3aa a8a9e0160a 9ca6aaeafd
aed5c39312 eb36bb2a8f 8dcf860888 a5b7cb6e6f 34c0bfefa6 ea5db048f3 e68a17f2c6 4af9ec3d8a
eb90e2c894
.devcontainer/devcontainer.json — new file (+4)

(Leading indentation inside the hunks below did not survive extraction, so lines are shown flat; the -/+ markers are reconstructed from the paired old/new lines.)

@@ -0,0 +1,4 @@
+{
+"image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
+"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+}
.devcontainer/postCreateCommand.sh — new executable file (+20)

@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Load spack environment at terminal startup
+cat <<EOF >> /root/.bashrc
+. /workspaces/spack/share/spack/setup-env.sh
+EOF
+
+# Load spack environment in this script
+. /workspaces/spack/share/spack/setup-env.sh
+
+# Ensure generic targets for maximum matching with buildcaches
+spack config --scope site add "packages:all:require:[target=x86_64_v3]"
+spack config --scope site add "concretizer:targets:granularity:generic"
+
+# Find compiler and install gcc-runtime
+spack compiler find --scope site
+
+# Setup buildcaches
+spack mirror add --scope site develop https://binaries.spack.io/develop
+spack buildcache keys --install --trust
.github/workflows/audit.yaml — 8 changes

@@ -22,8 +22,8 @@ jobs:
 matrix:
 operating_system: ["ubuntu-latest", "macos-latest"]
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: ${{inputs.python_version}}
 - name: Install Python packages
@@ -43,7 +43,9 @@ jobs:
 . share/spack/setup-env.sh
 $(which spack) audit packages
+$(which spack) audit externals
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 if: ${{ inputs.with_coverage == 'true' }}
 with:
 flags: unittests,audits
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
.github/workflows/bootstrap.yml — 24 changes

@@ -24,7 +24,7 @@ jobs:
 make patch unzip which xz python3 python3-devel tree \
 cmake bison bison-devel libstdc++-static
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
 make patch unzip xz-utils python3 python3-dev tree \
 cmake bison
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
 bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
 make patch unzip xz-utils python3 python3-dev tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
 make patch unzip which xz python3 python3-devel tree \
 cmake bison
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup repo
@@ -158,8 +158,8 @@ jobs:
 run: |
 brew install cmake bison@2.7 tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: "3.12"
 - name: Bootstrap clingo
@@ -182,7 +182,7 @@ jobs:
 run: |
 brew install tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 - name: Bootstrap clingo
 run: |
 set -ex
@@ -207,7 +207,7 @@ jobs:
 runs-on: ubuntu-20.04
 steps:
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup repo
@@ -250,7 +250,7 @@ jobs:
 bzip2 curl file g++ gcc patchelf gfortran git gzip \
 make patch unzip xz-utils python3 python3-dev tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -287,7 +287,7 @@ jobs:
 make patch unzip xz-utils python3 python3-dev tree \
 gawk
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -320,7 +320,7 @@ jobs:
 # Remove GnuPG since we want to bootstrap it
 sudo rm -rf /usr/local/bin/gpg
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 - name: Bootstrap GnuPG
 run: |
 source share/spack/setup-env.sh
@@ -338,7 +338,7 @@ jobs:
 # Remove GnuPG since we want to bootstrap it
 sudo rm -rf /usr/local/bin/gpg
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 - name: Bootstrap GnuPG
 run: |
 source share/spack/setup-env.sh
.github/workflows/build-containers.yml — 10 changes

@@ -55,7 +55,7 @@ jobs:
 if: github.repository == 'spack/spack'
 steps:
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

 - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
 id: docker_meta
@@ -96,10 +96,10 @@ jobs:
 uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

 - name: Set up Docker Buildx
-uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
+uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

 - name: Log in to GitHub Container Registry
-uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
+uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
 with:
 registry: ghcr.io
 username: ${{ github.actor }}
@@ -107,13 +107,13 @@ jobs:

 - name: Log in to DockerHub
 if: github.event_name != 'pull_request'
-uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
+uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
 with:
 username: ${{ secrets.DOCKERHUB_USERNAME }}
 password: ${{ secrets.DOCKERHUB_TOKEN }}

 - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56
+uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
 with:
 context: dockerfiles/${{ matrix.dockerfile[0] }}
 platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml — 8 changes

@@ -18,6 +18,7 @@ jobs:
 prechecks:
 needs: [ changes ]
 uses: ./.github/workflows/valid-style.yml
+secrets: inherit
 with:
 with_coverage: ${{ needs.changes.outputs.core }}
 all-prechecks:
@@ -35,12 +36,12 @@ jobs:
 core: ${{ steps.filter.outputs.core }}
 packages: ${{ steps.filter.outputs.packages }}
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 if: ${{ github.event_name == 'push' }}
 with:
 fetch-depth: 0
 # For pull requests it's not necessary to checkout the code
-- uses: dorny/paths-filter@ebc4d7e9ebcb0b1eb21480bb8f43113e996ac77a
+- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
 id: filter
 with:
 # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
@@ -70,14 +71,17 @@ jobs:
 if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
 needs: [ prechecks, changes ]
 uses: ./.github/workflows/bootstrap.yml
+secrets: inherit
 unit-tests:
 if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
 needs: [ prechecks, changes ]
 uses: ./.github/workflows/unit_tests.yaml
+secrets: inherit
 windows:
 if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
 needs: [ prechecks ]
 uses: ./.github/workflows/windows_python.yml
+secrets: inherit
 all:
 needs: [ windows, unit-tests, bootstrap ]
 runs-on: ubuntu-latest
.github/workflows/nightly-win-builds.yml — 4 changes

@@ -14,10 +14,10 @@ jobs:
 build-paraview-deps:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: 3.9
 - name: Install Python packages
.github/workflows/style/requirements.txt — 2 changes

@@ -1,4 +1,4 @@
-black==24.2.0
+black==24.4.0
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
.github/workflows/unit_tests.yaml — 37 changes

@@ -51,10 +51,10 @@ jobs:
 on_develop: false

 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: ${{ matrix.python-version }}
 - name: Install System packages
@@ -91,17 +91,19 @@ jobs:
 UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
 run: |
 share/spack/qa/run-unit-tests
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: unittests,linux,${{ matrix.concretizer }}
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
 # Test shell integration
 shell:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: '3.11'
 - name: Install System packages
@@ -122,9 +124,11 @@ jobs:
 COVERAGE: true
 run: |
 share/spack/qa/run-shell-tests
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: shelltests,linux
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true

 # Test RHEL8 UBI with platform Python. This job is run
 # only on PRs modifying core Spack
@@ -137,7 +141,7 @@ jobs:
 dnf install -y \
 bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
 make patch tcl unzip which xz
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 - name: Setup repo and non-root user
 run: |
 git --version
@@ -156,10 +160,10 @@ jobs:
 clingo-cffi:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: '3.11'
 - name: Install System packages
@@ -181,20 +185,23 @@ jobs:
 SPACK_TEST_SOLVER: clingo
 run: |
 share/spack/qa/run-unit-tests
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1 # @v2.1.0
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: unittests,linux,clingo
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
 # Run unit tests on MacOS
 macos:
-runs-on: macos-latest
+runs-on: ${{ matrix.os }}
 strategy:
 matrix:
+os: [macos-latest, macos-14]
 python-version: ["3.11"]
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: ${{ matrix.python-version }}
 - name: Install Python packages
@@ -216,6 +223,8 @@ jobs:
 $(which spack) solve zlib
 common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
 $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: unittests,macos
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
.github/workflows/valid-style.yml — 11 changes

@@ -18,8 +18,8 @@ jobs:
 validate:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: '3.11'
 cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
 style:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: '3.11'
 cache: 'pip'
@@ -56,6 +56,7 @@ jobs:
 share/spack/qa/run-style-tests
 audit:
 uses: ./.github/workflows/audit.yaml
+secrets: inherit
 with:
 with_coverage: ${{ inputs.with_coverage }}
 python_version: '3.11'
@@ -69,7 +70,7 @@ jobs:
 dnf install -y \
 bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
 make patch tcl unzip which xz
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 - name: Setup repo and non-root user
 run: |
 git --version
.github/workflows/windows_python.yml — 20 changes

@@ -15,10 +15,10 @@ jobs:
 unit-tests:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: 3.9
 - name: Install Python packages
@@ -33,16 +33,18 @@ jobs:
 ./share/spack/qa/validate_last_exit.ps1
 coverage combine -a
 coverage xml
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: unittests,windows
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
 unit-tests-cmd:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: 3.9
 - name: Install Python packages
@@ -57,16 +59,18 @@ jobs:
 ./share/spack/qa/validate_last_exit.ps1
 coverage combine -a
 coverage xml
-- uses: codecov/codecov-action@0cfda1dd0a4ad9efc75517f399d859cd1ea4ced1
+- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
 with:
 flags: unittests,windows
+token: ${{ secrets.CODECOV_TOKEN }}
 verbose: true
 build-abseil:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 with:
 fetch-depth: 0
-- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
 with:
 python-version: 3.9
 - name: Install Python packages
(The per-file headers for the remaining hunks did not survive extraction; each hunk is shown with its context line only.)

@@ -88,7 +88,7 @@ Resources:
 [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
 * [**Github Discussions**](https://github.com/spack/spack/discussions):
 for Q&A and discussions. Note the pinned discussions for announcements.
-* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
+* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
 `@mention` us!
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
 only for announcements. Please use other venues for discussions.
@@ -42,3 +42,8 @@ concretizer:
 # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
 # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
 strategy: minimal
+# Option to specify compatiblity between operating systems for reuse of compilers and packages
+# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
+# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
+# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
+os_compatible: {}
@@ -101,6 +101,12 @@ config:
 verify_ssl: true


+# This is where custom certs for proxy/firewall are stored.
+# It can be a path or environment variable. To match ssl env configuration
+# the default is the environment variable SSL_CERT_FILE
+ssl_certs: $SSL_CERT_FILE
+
+
 # Suppress gpg warnings from binary package verification
 # Only suppresses warnings, gpg failure will still fail the install
 # Potential rationale to set True: users have already explicitly trusted the
@@ -24,6 +24,7 @@ packages:
 elf: [elfutils]
 fftw-api: [fftw, amdfftw]
 flame: [libflame, amdlibflame]
+fortran-rt: [gcc-runtime, intel-oneapi-runtime]
 fuse: [libfuse]
 gl: [glx, osmesa]
 glu: [mesa-glu, openglu]
@@ -34,7 +35,9 @@ packages:
 java: [openjdk, jdk, ibm-java]
 jpeg: [libjpeg-turbo, libjpeg]
 lapack: [openblas, amdlibflame]
+libgfortran: [ gcc-runtime ]
 libglx: [mesa+glx, mesa18+glx]
+libifcore: [ intel-oneapi-runtime ]
 libllvm: [llvm]
 libosmesa: [mesa+osmesa, mesa18+osmesa]
 lua-lang: [lua, lua-luajit-openresty, lua-luajit]
@@ -1119,6 +1119,9 @@ and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
 ``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
 includes any version with major version ``3``.

+Versions are ordered lexicograpically by its components. For more details
+on the order, see :ref:`the packaging guide <version-comparison>`.
+
 Notice that you can distinguish between the specific version ``@=3.2`` and
 the range ``@3.2``. This is useful for packages that follow a versioning
 scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
@@ -220,6 +220,40 @@ section of the configuration:

 .. _binary_caches_oci:

+---------------------------------
+Automatic push to a build cache
+---------------------------------
+
+Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting autopush flag when adding a mirror:
+
+.. code-block:: console
+
+$ spack mirror add --autopush <name> <url or path>
+
+Or the autopush flag can be set for an existing mirror:
+
+.. code-block:: console
+
+$ spack mirror set --autopush <name>  # enable automatic push for an existing mirror
+$ spack mirror set --no-autopush <name>  # disable automatic push for an existing mirror
+
+Then after installing a package it is automatically pushed to all mirrors with ``autopush: true``. The command
+
+.. code-block:: console
+
+$ spack install <package>
+
+will have the same effect as
+
+.. code-block:: console
+
+$ spack install <package>
+$ spack buildcache push <cache> <package>  # for all caches with autopush: true
+
+.. note::
+
+Packages are automatically pushed to a build cache only if they are built from source.
+
 -----------------------------------------
 OCI / Docker V2 registries as build cache
 -----------------------------------------
@@ -87,7 +87,7 @@ You can check what is installed in the bootstrapping store at any time using:

 .. code-block:: console

-% spack find -b
+% spack -b find
 ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
 ==> 11 installed packages
 -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
@@ -101,7 +101,7 @@ In case it is needed you can remove all the software in the current bootstrapping
 % spack clean -b
 ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

-% spack find -b
+% spack -b find
 ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
 ==> 0 installed packages

@@ -175,4 +175,4 @@ bootstrapping.

 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
-commands suggested at the prompt, they can be used to bootstrap Spack.
+commands suggested at the prompt, they can be used to bootstrap Spack.

(The last hunk appears to change only trailing whitespace; the old and new lines are textually identical.)
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

 .. code-block:: python

-generator = "Ninja"
+generator("ninja")


 ``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -173,6 +173,72 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding
 ]


+^^^^^^^
+Testing
+^^^^^^^
+
+``PerlPackage`` provides a simple stand-alone test of the successfully
+installed package to confirm that installed perl module(s) can be used.
+These tests can be performed any time after the installation using
+``spack -v test run``. (For more information on the command, see
+:ref:`cmd-spack-test-run`.)
+
+The base class automatically detects perl modules based on the presence
+of ``*.pm`` files under the package's library directory. For example,
+the files under ``perl-bignum``'s perl library are:
+
+.. code-block:: console
+
+$ find . -name "*.pm"
+./bigfloat.pm
+./bigrat.pm
+./Math/BigFloat/Trace.pm
+./Math/BigInt/Trace.pm
+./Math/BigRat/Trace.pm
+./bigint.pm
+./bignum.pm
+
+which results in the package having the ``use_modules`` property containing:
+
+.. code-block:: python
+
+use_modules = [
+"bigfloat",
+"bigrat",
+"Math::BigFloat::Trace",
+"Math::BigInt::Trace",
+"Math::BigRat::Trace",
+"bigint",
+"bignum",
+]
+
+.. note::
+
+This list can often be used to catch missing dependencies.
+
+If the list is somehow wrong, you can provide the names of the modules
+yourself by overriding ``use_modules`` like so:
+
+.. code-block:: python
+
+use_modules = ["bigfloat", "bigrat", "bigint", "bignum"]
+
+If you only want a subset of the automatically detected modules to be
+tested, you could instead define the ``skip_modules`` property on the
+package. So, instead of overriding ``use_modules`` as shown above, you
+could define the following:
+
+.. code-block:: python
+
+skip_modules = [
+"Math::BigFloat::Trace",
+"Math::BigInt::Trace",
+"Math::BigRat::Trace",
+]
+
+for the same use tests.
+
 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack
 ^^^^^^^^^^^^^^^^^^^^^
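Aside (editor's illustration, not part of the diff): the ``use_modules`` detection described in the hunk above maps each ``*.pm`` file under the library directory to a Perl module name. A minimal Python sketch of that mapping, assuming only the behavior stated in the docs:

```python
from pathlib import Path

def detect_perl_modules(libdir: str) -> list:
    """Sketch of the documented detection: every *.pm file under the
    package's perl library maps to a module name, with path separators
    replaced by '::' (e.g. Math/BigFloat/Trace.pm -> Math::BigFloat::Trace)."""
    root = Path(libdir)
    return sorted(
        "::".join(pm.relative_to(root).with_suffix("").parts)
        for pm in root.rglob("*.pm")
    )
```

Run against the ``perl-bignum`` listing above, this yields the documented ``use_modules`` contents (modulo ordering).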
@@ -145,6 +145,22 @@ hosts when making ``ssl`` connections. Set to ``false`` to disable, and
 tools like ``curl`` will use their ``--insecure`` options. Disabling
 this can expose you to attacks. Use at your own risk.

+--------------------
+``ssl_certs``
+--------------------
+
+Path to custom certificats for SSL verification. The value can be a
+filesytem path, or an environment variable that expands to a file path.
+The default value is set to the environment variable ``SSL_CERT_FILE``
+to use the same syntax used by many other applications that automatically
+detect custom certificates.
+When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
+a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
+in the subprocess calling ``curl``.
+If ``url_fetch_method:urllib`` then files and directories are supported i.e.
+``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
+will work.
+
 --------------------
 ``checksum``
 --------------------
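Aside (editor's illustration, not part of the diff): the ``ssl_certs`` behavior above differs by fetch method. A rough sketch of the described logic; the function name and structure are illustrative, not Spack's internals:

```python
import os

def apply_ssl_certs(ssl_certs: str, url_fetch_method: str, env: dict) -> dict:
    # The configured value may be a literal path or an env var like "$SSL_CERT_FILE".
    path = os.path.expandvars(ssl_certs)
    if url_fetch_method == "curl":
        # curl wants a single bundle file, passed via CURL_CA_BUNDLE.
        if os.path.isfile(path):
            env["CURL_CA_BUNDLE"] = path
    else:  # urllib: both files and directories are documented to work
        if os.path.isfile(path):
            env["SSL_CERT_FILE"] = path
        elif os.path.isdir(path):
            env["SSL_CERT_DIR"] = path
    return env
```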
@@ -73,9 +73,12 @@ are six configuration scopes. From lowest to highest:
 Spack instance per project) or for site-wide settings on a multi-user
 machine (e.g., for a common Spack instance).

+#. **plugin**: Read from a Python project's entry points. Settings here affect
+all instances of Spack running with the same Python installation. This scope takes higher precedence than site, system, and default scopes.
+
 #. **user**: Stored in the home directory: ``~/.spack/``. These settings
 affect all instances of Spack and take higher precedence than site,
-system, or defaults scopes.
+system, plugin, or defaults scopes.

 #. **custom**: Stored in a custom directory specified by ``--config-scope``.
 If multiple scopes are listed on the command line, they are ordered
@@ -196,6 +199,45 @@ with MPICH. You can create different configuration scopes for use with
 mpi: [mpich]


+.. _plugin-scopes:
+
+^^^^^^^^^^^^^
+Plugin scopes
+^^^^^^^^^^^^^
+
+.. note::
+Python version >= 3.8 is required to enable plugin configuration.
+
+Spack can be made aware of configuration scopes that are installed as part of a python package. To do so, register a function that returns the scope's path to the ``"spack.config"`` entry point. Consider the Python package ``my_package`` that includes Spack configurations:
+
+.. code-block:: console
+
+my-package/
+├── src
+│   ├── my_package
+│   │   ├── __init__.py
+│   │   └── spack/
+│   │   │   └── config.yaml
+└── pyproject.toml
+
+adding the following to ``my_package``'s ``pyproject.toml`` will make ``my_package``'s ``spack/`` configurations visible to Spack when ``my_package`` is installed:
+
+.. code-block:: toml
+
+[project.entry_points."spack.config"]
+my_package = "my_package:get_config_path"
+
+The function ``my_package.get_extension_path`` in ``my_package/__init__.py`` might look like
+
+.. code-block:: python
+
+import importlib.resources
+
+def get_config_path():
+    dirname = importlib.resources.files("my_package").joinpath("spack")
+    if dirname.exists():
+        return str(dirname)
+
 .. _platform-scopes:

 ------------------------
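Aside (editor's illustration, not part of the diff): on the consuming side, a tool can discover such ``"spack.config"`` entry points with ``importlib.metadata``. A minimal sketch, assuming Python 3.10+ for the ``group=`` keyword (older versions index the mapping returned by ``entry_points()`` instead); the same pattern applies to the ``"spack.extensions"`` group shown later:

```python
import importlib.metadata

def plugin_config_paths():
    # Each entry point loads to a function like get_config_path() above,
    # which returns the directory containing the plugin's configuration.
    for ep in importlib.metadata.entry_points(group="spack.config"):
        get_path = ep.load()
        path = get_path()
        if path is not None:
            yield ep.name, path
```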
@@ -1071,9 +1071,9 @@ Announcing a release

 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:

-#. Announce the release on Twitter.
+#. Announce the release on X.

 * Compose the tweet on the ``@spackpm`` account per the
 ``spack-twitter`` slack channel.
@@ -952,6 +952,17 @@ function, as shown in the example below:
 ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
 all: "{name}-{version}/{compiler.name}-{compiler.version}"

+Projections also permit environment and spack configuration variable
+expansions as shown below:
+
+.. code-block:: yaml
+
+projections:
+all: "{name}-{version}/{compiler.name}-{compiler.version}/$date/$SYSTEM_ENV_VARIBLE"
+
+where ``$date`` is the spack configuration variable that will expand with the ``YYYY-MM-DD``
+format and ``$SYSTEM_ENV_VARIABLE`` is an environment variable defined in the shell.
+
 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
 be the first non-``all`` entry that the spec satisfies, or ``all`` if
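Aside (editor's illustration, not part of the diff): the ``$date`` and environment-variable expansion described above can be pictured as a two-step substitution. A hedged sketch that mimics the documented behavior; it is not Spack's actual implementation:

```python
import datetime
import os

def expand_projection(template: str) -> str:
    # "$date" expands to today's date in YYYY-MM-DD form, then any
    # remaining $VARS are taken from the shell environment.
    expanded = template.replace("$date", datetime.date.today().isoformat())
    return os.path.expandvars(expanded)

# e.g. expand_projection("{name}-{version}/$date/$MY_ENV_VAR")
```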
@@ -111,3 +111,39 @@ The corresponding unit tests can be run giving the appropriate options to ``spac

 (5 durations < 0.005s hidden. Use -vv to show these durations.)
 =========================================== 5 passed in 5.06s ============================================

+---------------------------------------
+Registering Extensions via Entry Points
+---------------------------------------
+
+.. note::
+Python version >= 3.8 is required to register extensions via entry points.
+
+Spack can be made aware of extensions that are installed as part of a python package. To do so, register a function that returns the extension path, or paths, to the ``"spack.extensions"`` entry point. Consider the Python package ``my_package`` that includes a Spack extension:
+
+.. code-block:: console
+
+my-package/
+├── src
+│   ├── my_package
+│   │   └── __init__.py
+│   └── spack-scripting/  # the spack extensions
+└── pyproject.toml
+
+adding the following to ``my_package``'s ``pyproject.toml`` will make the ``spack-scripting`` extension visible to Spack when ``my_package`` is installed:
+
+.. code-block:: toml
+
+[project.entry_points."spack.extenions"]
+my_package = "my_package:get_extension_path"
+
+The function ``my_package.get_extension_path`` in ``my_package/__init__.py`` might look like
+
+.. code-block:: python
+
+import importlib.resources
+
+def get_extension_path():
+    dirname = importlib.resources.files("my_package").joinpath("spack-scripting")
+    if dirname.exists():
+        return str(dirname)
@@ -250,9 +250,10 @@ Compiler configuration

 Spack has the ability to build packages with multiple compilers and
 compiler versions. Compilers can be made available to Spack by
-specifying them manually in ``compilers.yaml``, or automatically by
-running ``spack compiler find``, but for convenience Spack will
-automatically detect compilers the first time it needs them.
+specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
+or automatically by running ``spack compiler find``, but for
+convenience Spack will automatically detect compilers the first time
+it needs them.

 .. _cmd-spack-compilers:

@@ -457,6 +458,48 @@ specification. The operations available to modify the environment are ``set``, `
 prepend_path: # Similar for append|remove_path
 LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

+.. note::
+
+Spack is in the process of moving compilers from a separate
+attribute to be handled like all other packages. As part of this
+process, the ``compilers.yaml`` section will eventually be replaced
+by configuration in the ``packages.yaml`` section. This new
+configuration is now available, although it is not yet the default
+behavior.
+
+Compilers can also be configured as external packages in the
+``packages.yaml`` config file. Any external package for a compiler
+(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
+assuming the paths to the compiler executables are determinable from
+the prefix.
+
+If the paths to the compiler executable are not determinable from the
+prefix, you can add them to the ``extra_attributes`` field. Similarly,
+all other fields from the compilers config can be added to the
+``extra_attributes`` field for an external representing a compiler.
+
+.. code-block:: yaml
+
+packages:
+gcc:
+external:
+- spec: gcc@12.2.0 arch=linux-rhel8-skylake
+prefix: /usr
+extra_attributes:
+environment:
+set:
+GCC_ROOT: /usr
+external:
+- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
+prefix: /usr
+extra_attributes:
+paths:
+cc: /usr/bin/clang-with-suffix
+cxx: /usr/bin/clang++-with-extra-info
+fc: /usr/bin/gfortran
+f77: /usr/bin/gfortran
+extra_rpaths:
+- /usr/lib/llvm/

 ^^^^^^^^^^^^^^^^^^^^^^^
 Build Your Own Compiler
@@ -1529,6 +1572,8 @@ Microsoft Visual Studio
 """""""""""""""""""""""

 Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
+Spack additionally requires that the Windows SDK (including WGL) to be installed as part of your
+visual studio installation as it is required to build many packages from source.

 We require several specific components to be included in the Visual Studio installation.
 One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1536,6 +1581,7 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
 "C++ CMake tools for Windows," which can be selected from among the optional packages.
 This provides CMake and Ninja for use during Spack configuration.

+
 If you already have Visual Studio installed, you can make sure these components are installed by
 rerunning the installer. Next to your installation, select "Modify" and look at the
 "Installation details" pane on the right.
@@ -273,9 +273,21 @@ builtin support through the ``depends_on`` function, the latter simply uses a ``
 statement. Both module systems (at least in newer versions) do reference counting, so that if a
 module is loaded by two different modules, it will only be unloaded after the others are.

-The ``autoload`` key accepts the values ``none``, ``direct``, and ``all``. To disable it, use
-``none``, and to enable, it's best to stick to ``direct``, which only autoloads the direct link and
-run type dependencies, relying on recursive autoloading to load the rest.
+The ``autoload`` key accepts the values:
+
+* ``none``: no autoloading
+* ``run``: autoload direct *run* type dependencies
+* ``direct``: autoload direct *link and run* type dependencies
+* ``all``: autoload all dependencies
+
+In case of ``run`` and ``direct``, a ``module load`` triggers a recursive load.
+
+The ``direct`` option is most correct: there are cases where pure link dependencies need to set
+variables for themselves, or need to have variables of their own dependencies set.
+
+In practice however, ``run`` is often sufficient, and may make ``module load`` snappier.
+
+The ``all`` option is discouraged and seldomly used.

 A common complaint about autoloading is the large number of modules that are visible to the user.
 Spack has a solution for this as well: ``hide_implicits: true``. This ensures that only those
@@ -297,11 +309,11 @@ Environment Modules requires version 4.7 or higher.
 tcl:
 hide_implicits: true
 all:
-autoload: direct
+autoload: direct # or `run`
 lmod:
 hide_implicits: true
 all:
-autoload: direct
+autoload: direct # or `run`

 .. _anonymous_specs:
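Aside (editor's illustration, not part of the diff): the four ``autoload`` values in the hunks above differ only in which dependency edges they follow. A small sketch of that policy, assuming a dependency table of ``(dep, deptype)`` pairs; the names are illustrative, not Spack's internals:

```python
def autoloaded_deps(pkg, deps, mode):
    # deps maps a package to [(dependency, deptype), ...] with deptype in
    # {"build", "link", "run"}. For "run" and "direct", a `module load`
    # then recursively autoloads what those modules autoload themselves.
    wanted = {
        "none": (),
        "run": ("run",),
        "direct": ("link", "run"),
        "all": ("build", "link", "run"),
    }[mode]
    return [d for d, t in deps.get(pkg, []) if t in wanted]
```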
|
@@ -893,26 +893,50 @@ as an option to the ``version()`` directive. Example situations would be a
"snapshot"-like Version Control System (VCS) tag, a VCS branch such as
``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.

.. _version-comparison:

^^^^^^^^^^^^^^^^^^
Version comparison
^^^^^^^^^^^^^^^^^^

Spack imposes a generic total ordering on the set of versions,
independently from the package they are associated with.

Most Spack versions are numeric, a tuple of integers; for example,
``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
numeric versions.
``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
comparison is lexicographical on the numeric components:
``1.2 < 1.2.1 < 1.2.2 < 1.10``.

Some Spack versions involve slight extensions of numeric syntax; for
example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
always considered to be "newer" than letters. This is for consistency
with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
Spack also supports string components such as ``1.1.1a`` and
``1.y.0``. String components are considered less than numeric
components, so ``1.y.0 < 1.0``. This is for consistency with
`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
components do not have to be separated by dots or any other delimiter.
So, the contrived version ``1y0`` is identical to ``1.y.0``.

Spack versions may also be arbitrary non-numeric strings, for example
``develop``, ``master``, ``local``.
Pre-release suffixes also contain string parts, but they are handled
in a special way. For example, ``1.2.3alpha1`` is parsed as a pre-release
of the version ``1.2.3``. This allows Spack to order it before the
actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
suffix not recognized as a pre-release is treated as an ordinary
string component, so ``1.2 < 1.2-mysuffix``.

The order on versions is defined as follows. A version string is split
into a list of components based on delimiters such as ``.``, ``-`` etc.
Lists are then ordered lexicographically, where components are ordered
as follows:
Finally, there are a few special string components that are considered
"infinity versions". They include ``develop``, ``main``, ``master``,
``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
These are useful for specifying the most recent development version of
a package (often a moving target like a git branch), without assigning
a specific version number. Infinity versions are not automatically used
when determining the latest version of a package unless explicitly
required by another package or user.

More formally, the order on versions is defined as follows. A version
string is split into a list of components based on delimiters such as
``.`` and ``-`` and string boundaries. The components are split into
the **release** and a possible **pre-release** (if the last component
is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
The release components are ordered lexicographically, with comparison
between different types of components as follows:

#. The following special strings are considered larger than any other
   numeric or non-numeric version component, and satisfy the following
@@ -925,6 +949,9 @@ as follows:
#. All other non-numeric components are less than numeric components,
   and are ordered alphabetically.

Finally, if the release components are equal, the pre-release components
are used to break the tie, in the obvious way.
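
These rules are easy to sanity-check interactively. The sketch below assumes the ``spack.version.Version`` class, which implements this ordering, and can be run inside a ``spack python`` session; every comparison restates an example from the text above.

.. code-block:: python

   from spack.version import Version

   assert Version("1.2") < Version("1.2.2") < Version("1.10")        # numeric, lexicographic
   assert Version("1.y.0") < Version("1.0")                          # strings sort below numbers
   assert Version("1.2alpha1") < Version("1.2rc1") < Version("1.2")  # pre-releases precede the release
   assert Version("1.2") < Version("1.2-mysuffix")                   # unrecognized suffix: ordinary string
   assert Version("1.2") < Version("develop")                        # infinity versions beat numbers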

The logic behind this sort order is two-fold:

#. Non-numeric versions are usually used for special cases while
@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.0.2
pytest==8.1.1
isort==5.13.2
black==24.2.0
black==24.4.0
flake8==7.0.0
mypy==1.8.0
mypy==1.9.0

249
lib/spack/env/cc
vendored
@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
|
||||
SPACK_FC_RPATH_ARG
|
||||
SPACK_LINKER_ARG
|
||||
SPACK_SHORT_SPEC
|
||||
SPACK_SYSTEM_DIRS"
|
||||
SPACK_SYSTEM_DIRS
|
||||
SPACK_MANAGED_DIRS"
|
||||
|
||||
# Optional parameters that aren't required to be set
|
||||
|
||||
@@ -173,21 +174,17 @@ preextend() {
|
||||
unset IFS
|
||||
}
|
||||
|
||||
# system_dir PATH
|
||||
# test whether a path is a system directory
|
||||
system_dir() {
|
||||
IFS=':' # SPACK_SYSTEM_DIRS is colon-separated
|
||||
path="$1"
|
||||
for sd in $SPACK_SYSTEM_DIRS; do
|
||||
if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
|
||||
# success if path starts with a system prefix
|
||||
unset IFS
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
unset IFS
|
||||
return 1 # fail if path starts no system prefix
|
||||
# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
|
||||
# moving the eval inside the function would eval it every call.
|
||||
eval "\
|
||||
path_order() {
|
||||
case \"\$1\" in
|
||||
$SPACK_MANAGED_DIRS) return 0 ;;
|
||||
$SPACK_SYSTEM_DIRS) return 2 ;;
|
||||
/*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
"
|
||||
|
||||
# Fail with a clear message if the input contains any bell characters.
|
||||
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
|
||||
@@ -248,7 +245,7 @@ case "$command" in
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
@@ -420,11 +417,12 @@ input_command="$*"
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
if system_dir "$1"; then
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
@@ -432,21 +430,25 @@ parse_Wl() {
|
||||
arg="${1#-rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
@@ -473,12 +475,20 @@ categorize_arguments() {
|
||||
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
|
||||
return_isystem_spack_store_include_dirs_list=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
|
||||
return_spack_store_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
|
||||
return_spack_store_lib_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
|
||||
return_spack_store_rpath_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
@@ -526,7 +536,7 @@ categorize_arguments() {
|
||||
continue
|
||||
fi
|
||||
|
||||
replaced="$after$stripped"
|
||||
replaced="$after$stripped"
|
||||
|
||||
# it matched, remove it
|
||||
shift
|
||||
@@ -546,29 +556,32 @@ categorize_arguments() {
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_isystem_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_isystem_include_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_isystem_include_dirs_list "$arg" ;;
|
||||
2) append return_isystem_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_include_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_include_dirs_list "$arg" ;;
|
||||
2) append return_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_lib_dirs_list "$arg"
|
||||
else
|
||||
append return_lib_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_lib_dirs_list "$arg" ;;
|
||||
1) append return_lib_dirs_list "$arg" ;;
|
||||
2) append return_system_lib_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
@@ -601,29 +614,32 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
if system_dir "$1"; then
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -661,16 +677,25 @@ categorize_arguments() {
|
||||
}
|
||||
|
||||
categorize_arguments "$@"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
other_args_list="$return_other_args_list"
|
||||
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
|
||||
#
|
||||
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
|
||||
@@ -738,16 +763,25 @@ esac
|
||||
IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
|
||||
# On macOS insert headerpad_max_install_names linker flag
|
||||
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
# Append RPATH directories. Note that in the case of the
|
||||
# top-level package these directories may not exist yet. For dependencies
|
||||
# it is assumed that paths have already been confirmed.
|
||||
extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
|
||||
extend rpath_dirs_list SPACK_RPATH_DIRS
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
|
||||
extend lib_dirs_list SPACK_LINK_DIRS
|
||||
fi
|
||||
|
||||
@@ -798,38 +834,50 @@ case "$mode" in
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend include_dirs_list SPACK_INCLUDE_DIRS
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Finally, reassemble the command line.
|
||||
#
|
||||
args_list="$flags_list"
|
||||
|
||||
# Insert include directories just prior to any system include directories
# Include search paths partitioned by (in store, non-system, system)
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_flags_include_dirs_list "-I"
|
||||
extend args_list include_dirs_list "-I"
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
elif [ "$isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
else
|
||||
extend args_list SPACK_INCLUDE_DIRS "-I"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths
# Library search paths partitioned by (in store, non-system, system)
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
@@ -839,8 +887,12 @@ case "$mode" in
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
@@ -848,8 +900,12 @@ case "$mode" in
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
@@ -913,4 +969,3 @@ fi
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
# to the alarm bell separator.
|
||||
IFS="$lsep"; exec $full_command_list
|
||||
|
||||
|
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
|
||||
* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
|
||||
Tag.attrib, merge_attrib]:
|
||||
if hasattr(self, a):
|
||||
if memo is not None:
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a), memo))
|
||||
else:
|
||||
setattr(t, a, getattr(self, a))
|
||||
# fmt: on
|
||||
|
3
lib/spack/external/archspec/__init__.py
vendored
@@ -1,2 +1,3 @@
|
||||
"""Init file to avoid namespace packages"""
|
||||
__version__ = "0.2.2"
|
||||
|
||||
__version__ = "0.2.3"
|
||||
|
1
lib/spack/external/archspec/__main__.py
vendored
@@ -3,6 +3,7 @@
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
from .cli import main
|
||||
|
||||
sys.exit(main())
|
||||
|
6
lib/spack/external/archspec/cli.py
vendored
@@ -46,7 +46,11 @@ def _make_parser() -> argparse.ArgumentParser:
|
||||
|
||||
def cpu() -> int:
|
||||
"""Run the `archspec cpu` subcommand."""
|
||||
print(archspec.cpu.host())
|
||||
try:
|
||||
print(archspec.cpu.host())
|
||||
except FileNotFoundError as exc:
|
||||
print(exc)
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
|
10
lib/spack/external/archspec/cpu/__init__.py
vendored
@@ -5,10 +5,14 @@
|
||||
"""The "cpu" package permits to query and compare different
|
||||
CPU microarchitectures.
|
||||
"""
|
||||
from .microarchitecture import Microarchitecture, UnsupportedMicroarchitecture
|
||||
from .microarchitecture import TARGETS, generic_microarchitecture
|
||||
from .microarchitecture import version_components
|
||||
from .detect import host
|
||||
from .microarchitecture import (
|
||||
TARGETS,
|
||||
Microarchitecture,
|
||||
UnsupportedMicroarchitecture,
|
||||
generic_microarchitecture,
|
||||
version_components,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"Microarchitecture",
|
||||
|
372
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -4,15 +4,17 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Detection of CPU microarchitectures"""
|
||||
import collections
|
||||
import functools
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import struct
|
||||
import subprocess
|
||||
import warnings
|
||||
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||
|
||||
from .microarchitecture import generic_microarchitecture, TARGETS
|
||||
from .schema import TARGETS_JSON
|
||||
from ..vendor.cpuid.cpuid import CPUID
|
||||
from .microarchitecture import TARGETS, Microarchitecture, generic_microarchitecture
|
||||
from .schema import CPUID_JSON, TARGETS_JSON
|
||||
|
||||
#: Mapping from operating systems to chain of commands
|
||||
#: to obtain a dictionary of raw info on the current cpu
|
||||
@@ -22,43 +24,46 @@
|
||||
#: functions checking the compatibility of the host with a given target
|
||||
COMPATIBILITY_CHECKS = {}
|
||||
|
||||
# Constants for commonly used architectures
|
||||
X86_64 = "x86_64"
|
||||
AARCH64 = "aarch64"
|
||||
PPC64LE = "ppc64le"
|
||||
PPC64 = "ppc64"
|
||||
RISCV64 = "riscv64"
|
||||
|
||||
def info_dict(operating_system):
|
||||
"""Decorator to mark functions that are meant to return raw info on
|
||||
the current cpu.
|
||||
|
||||
def detection(operating_system: str):
|
||||
"""Decorator to mark functions that are meant to return partial information on the current cpu.
|
||||
|
||||
Args:
|
||||
operating_system (str or tuple): operating system for which the marked
|
||||
function is a viable factory of raw info dictionaries.
|
||||
operating_system: operating system where this function can be used.
|
||||
"""
|
||||
|
||||
def decorator(factory):
|
||||
INFO_FACTORY[operating_system].append(factory)
|
||||
|
||||
@functools.wraps(factory)
|
||||
def _impl():
|
||||
info = factory()
|
||||
|
||||
# Check that info contains a few mandatory fields
|
||||
msg = 'field "{0}" is missing from raw info dictionary'
|
||||
assert "vendor_id" in info, msg.format("vendor_id")
|
||||
assert "flags" in info, msg.format("flags")
|
||||
assert "model" in info, msg.format("model")
|
||||
assert "model_name" in info, msg.format("model_name")
|
||||
|
||||
return info
|
||||
|
||||
return _impl
|
||||
return factory
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
@info_dict(operating_system="Linux")
|
||||
def proc_cpuinfo():
|
||||
"""Returns a raw info dictionary by parsing the first entry of
|
||||
``/proc/cpuinfo``
|
||||
"""
|
||||
info = {}
|
||||
def partial_uarch(
|
||||
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
|
||||
) -> Microarchitecture:
|
||||
"""Construct a partial microarchitecture, from information gathered during system scan."""
|
||||
return Microarchitecture(
|
||||
name=name,
|
||||
parents=[],
|
||||
vendor=vendor,
|
||||
features=features or set(),
|
||||
compilers={},
|
||||
generation=generation,
|
||||
)
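
For orientation, a minimal sketch of the partial result this helper produces, using only the constructor signature shown above; the vendor string and feature names are illustrative values of the kind ``/proc/cpuinfo`` would supply on an x86_64 host:

example = partial_uarch(
    vendor="GenuineIntel",
    features={"sse2", "avx", "avx2"},
)
# Only vendor and features are filled in; name, parents, compilers and
# generation keep their neutral defaults until the compatibility checks run.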
|
||||
|
||||
|
||||
@detection(operating_system="Linux")
|
||||
def proc_cpuinfo() -> Microarchitecture:
|
||||
"""Returns a partial Microarchitecture, obtained from scanning ``/proc/cpuinfo``"""
|
||||
data = {}
|
||||
with open("/proc/cpuinfo") as file: # pylint: disable=unspecified-encoding
|
||||
for line in file:
|
||||
key, separator, value = line.partition(":")
|
||||
@@ -70,11 +75,96 @@ def proc_cpuinfo():
|
||||
#
|
||||
# we are on a blank line separating two cpus. Exit early as
|
||||
# we want to read just the first entry in /proc/cpuinfo
|
||||
if separator != ":" and info:
|
||||
if separator != ":" and data:
|
||||
break
|
||||
|
||||
info[key.strip()] = value.strip()
|
||||
return info
|
||||
data[key.strip()] = value.strip()
|
||||
|
||||
architecture = _machine()
|
||||
if architecture == X86_64:
|
||||
return partial_uarch(
|
||||
vendor=data.get("vendor_id", "generic"), features=_feature_set(data, key="flags")
|
||||
)
|
||||
|
||||
if architecture == AARCH64:
|
||||
return partial_uarch(
|
||||
vendor=_canonicalize_aarch64_vendor(data),
|
||||
features=_feature_set(data, key="Features"),
|
||||
)
|
||||
|
||||
if architecture in (PPC64LE, PPC64):
|
||||
generation_match = re.search(r"POWER(\d+)", data.get("cpu", ""))
|
||||
try:
|
||||
generation = int(generation_match.group(1))
|
||||
except AttributeError:
|
||||
# There might be no match under emulated environments. For instance
|
||||
# emulating a ppc64le with QEMU and Docker still reports the host
|
||||
# /proc/cpuinfo and not a Power
|
||||
generation = 0
|
||||
return partial_uarch(generation=generation)
|
||||
|
||||
if architecture == RISCV64:
|
||||
if data.get("uarch") == "sifive,u74-mc":
|
||||
data["uarch"] = "u74mc"
|
||||
return partial_uarch(name=data.get("uarch", RISCV64))
|
||||
|
||||
return generic_microarchitecture(architecture)
|
||||
|
||||
|
||||
class CpuidInfoCollector:
|
||||
"""Collects the information we need on the host CPU from cpuid"""
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
def __init__(self):
|
||||
self.cpuid = CPUID()
|
||||
|
||||
registers = self.cpuid.registers_for(**CPUID_JSON["vendor"]["input"])
|
||||
self.highest_basic_support = registers.eax
|
||||
self.vendor = struct.pack("III", registers.ebx, registers.edx, registers.ecx).decode(
|
||||
"utf-8"
|
||||
)
|
||||
|
||||
registers = self.cpuid.registers_for(**CPUID_JSON["highest_extension_support"]["input"])
|
||||
self.highest_extension_support = registers.eax
|
||||
|
||||
self.features = self._features()
|
||||
|
||||
def _features(self):
|
||||
result = set()
|
||||
|
||||
def check_features(data):
|
||||
registers = self.cpuid.registers_for(**data["input"])
|
||||
for feature_check in data["bits"]:
|
||||
current = getattr(registers, feature_check["register"])
|
||||
if self._is_bit_set(current, feature_check["bit"]):
|
||||
result.add(feature_check["name"])
|
||||
|
||||
for call_data in CPUID_JSON["flags"]:
|
||||
if call_data["input"]["eax"] > self.highest_basic_support:
|
||||
continue
|
||||
check_features(call_data)
|
||||
|
||||
for call_data in CPUID_JSON["extension-flags"]:
|
||||
if call_data["input"]["eax"] > self.highest_extension_support:
|
||||
continue
|
||||
check_features(call_data)
|
||||
|
||||
return result
|
||||
|
||||
def _is_bit_set(self, register: int, bit: int) -> bool:
|
||||
mask = 1 << bit
|
||||
return register & mask > 0
|
||||
|
||||
|
||||
@detection(operating_system="Windows")
|
||||
def cpuid_info():
|
||||
"""Returns a partial Microarchitecture, obtained from running the cpuid instruction"""
|
||||
architecture = _machine()
|
||||
if architecture == X86_64:
|
||||
data = CpuidInfoCollector()
|
||||
return partial_uarch(vendor=data.vendor, features=data.features)
|
||||
|
||||
return generic_microarchitecture(architecture)
|
||||
|
||||
|
||||
def _check_output(args, env):
|
||||
@@ -83,14 +173,25 @@ def _check_output(args, env):
|
||||
return str(output.decode("utf-8"))
|
||||
|
||||
|
||||
WINDOWS_MAPPING = {
|
||||
"AMD64": "x86_64",
|
||||
"ARM64": "aarch64",
|
||||
}
|
||||
|
||||
|
||||
def _machine():
|
||||
""" "Return the machine architecture we are on"""
|
||||
"""Return the machine architecture we are on"""
|
||||
operating_system = platform.system()
|
||||
|
||||
# If we are not on Darwin, trust what Python tells us
|
||||
if operating_system != "Darwin":
|
||||
# If we are not on Darwin or Windows, trust what Python tells us
|
||||
if operating_system not in ("Darwin", "Windows"):
|
||||
return platform.machine()
|
||||
|
||||
# Normalize windows specific names
|
||||
if operating_system == "Windows":
|
||||
platform_machine = platform.machine()
|
||||
return WINDOWS_MAPPING.get(platform_machine, platform_machine)
|
||||
|
||||
# On Darwin it might happen that we are on M1, but using an interpreter
|
||||
# built for x86_64. In that case "platform.machine() == 'x86_64'", so we
|
||||
# need to fix that.
|
||||
@@ -103,54 +204,47 @@ def _machine():
|
||||
if "Apple" in output:
|
||||
# Note that a native Python interpreter on Apple M1 would return
|
||||
# "arm64" instead of "aarch64". Here we normalize to the latter.
|
||||
return "aarch64"
|
||||
return AARCH64
|
||||
|
||||
return "x86_64"
|
||||
return X86_64
|
||||
|
||||
|
||||
@info_dict(operating_system="Darwin")
|
||||
def sysctl_info_dict():
|
||||
@detection(operating_system="Darwin")
|
||||
def sysctl_info() -> Microarchitecture:
|
||||
"""Returns a raw info dictionary parsing the output of sysctl."""
|
||||
child_environment = _ensure_bin_usrbin_in_path()
|
||||
|
||||
def sysctl(*args):
|
||||
def sysctl(*args: str) -> str:
|
||||
return _check_output(["sysctl"] + list(args), env=child_environment).strip()
|
||||
|
||||
if _machine() == "x86_64":
|
||||
flags = (
|
||||
sysctl("-n", "machdep.cpu.features").lower()
|
||||
+ " "
|
||||
+ sysctl("-n", "machdep.cpu.leaf7_features").lower()
|
||||
if _machine() == X86_64:
|
||||
features = (
|
||||
f'{sysctl("-n", "machdep.cpu.features").lower()} '
|
||||
f'{sysctl("-n", "machdep.cpu.leaf7_features").lower()}'
|
||||
)
|
||||
info = {
|
||||
"vendor_id": sysctl("-n", "machdep.cpu.vendor"),
|
||||
"flags": flags,
|
||||
"model": sysctl("-n", "machdep.cpu.model"),
|
||||
"model name": sysctl("-n", "machdep.cpu.brand_string"),
|
||||
}
|
||||
else:
|
||||
model = "unknown"
|
||||
model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
|
||||
if "m2" in model_str:
|
||||
model = "m2"
|
||||
elif "m1" in model_str:
|
||||
model = "m1"
|
||||
elif "apple" in model_str:
|
||||
model = "m1"
|
||||
features = set(features.split())
|
||||
|
||||
info = {
|
||||
"vendor_id": "Apple",
|
||||
"flags": [],
|
||||
"model": model,
|
||||
"CPU implementer": "Apple",
|
||||
"model name": sysctl("-n", "machdep.cpu.brand_string"),
|
||||
}
|
||||
return info
|
||||
# Flags detected on Darwin turned to their linux counterpart
|
||||
for darwin_flag, linux_flag in TARGETS_JSON["conversions"]["darwin_flags"].items():
|
||||
if darwin_flag in features:
|
||||
features.update(linux_flag.split())
|
||||
|
||||
return partial_uarch(vendor=sysctl("-n", "machdep.cpu.vendor"), features=features)
|
||||
|
||||
model = "unknown"
|
||||
model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
|
||||
if "m2" in model_str:
|
||||
model = "m2"
|
||||
elif "m1" in model_str:
|
||||
model = "m1"
|
||||
elif "apple" in model_str:
|
||||
model = "m1"
|
||||
|
||||
return partial_uarch(name=model, vendor="Apple")
|
||||
|
||||
|
||||
def _ensure_bin_usrbin_in_path():
|
||||
# Make sure that /sbin and /usr/sbin are in PATH as sysctl is
|
||||
# usually found there
|
||||
# Make sure that /sbin and /usr/sbin are in PATH as sysctl is usually found there
|
||||
child_environment = dict(os.environ.items())
|
||||
search_paths = child_environment.get("PATH", "").split(os.pathsep)
|
||||
for additional_path in ("/sbin", "/usr/sbin"):
|
||||
@@ -160,22 +254,10 @@ def _ensure_bin_usrbin_in_path():
|
||||
return child_environment
|
||||
|
||||
|
||||
def adjust_raw_flags(info):
|
||||
"""Adjust the flags detected on the system to homogenize
|
||||
slightly different representations.
|
||||
"""
|
||||
# Flags detected on Darwin turned to their linux counterpart
|
||||
flags = info.get("flags", [])
|
||||
d2l = TARGETS_JSON["conversions"]["darwin_flags"]
|
||||
for darwin_flag, linux_flag in d2l.items():
|
||||
if darwin_flag in flags:
|
||||
info["flags"] += " " + linux_flag
|
||||
|
||||
|
||||
def adjust_raw_vendor(info):
|
||||
"""Adjust the vendor field to make it human readable"""
|
||||
if "CPU implementer" not in info:
|
||||
return
|
||||
def _canonicalize_aarch64_vendor(data: Dict[str, str]) -> str:
|
||||
"""Adjust the vendor field to make it human-readable"""
|
||||
if "CPU implementer" not in data:
|
||||
return "generic"
|
||||
|
||||
# Mapping numeric codes to vendor (ARM). This list is a merge from
|
||||
# different sources:
|
||||
@@ -185,43 +267,37 @@ def adjust_raw_vendor(info):
|
||||
# https://github.com/gcc-mirror/gcc/blob/master/gcc/config/aarch64/aarch64-cores.def
|
||||
# https://patchwork.kernel.org/patch/10524949/
|
||||
arm_vendors = TARGETS_JSON["conversions"]["arm_vendors"]
|
||||
arm_code = info["CPU implementer"]
|
||||
if arm_code in arm_vendors:
|
||||
info["CPU implementer"] = arm_vendors[arm_code]
|
||||
arm_code = data["CPU implementer"]
|
||||
return arm_vendors.get(arm_code, arm_code)
|
||||
|
||||
|
||||
def raw_info_dictionary():
|
||||
"""Returns a dictionary with information on the cpu of the current host.
|
||||
def _feature_set(data: Dict[str, str], key: str) -> Set[str]:
|
||||
return set(data.get(key, "").split())
|
||||
|
||||
This function calls all the viable factories one after the other until
|
||||
there's one that is able to produce the requested information.
|
||||
|
||||
def detected_info() -> Microarchitecture:
|
||||
"""Returns a partial Microarchitecture with information on the CPU of the current host.
|
||||
|
||||
This function calls all the viable factories one after the other until there's one that is
|
||||
able to produce the requested information. Falls-back to a generic microarchitecture, if none
|
||||
of the calls succeed.
|
||||
"""
|
||||
# pylint: disable=broad-except
|
||||
info = {}
|
||||
for factory in INFO_FACTORY[platform.system()]:
|
||||
try:
|
||||
info = factory()
|
||||
return factory()
|
||||
except Exception as exc:
|
||||
warnings.warn(str(exc))
|
||||
|
||||
if info:
|
||||
adjust_raw_flags(info)
|
||||
adjust_raw_vendor(info)
|
||||
break
|
||||
|
||||
return info
|
||||
return generic_microarchitecture(_machine())
|
||||
|
||||
|
||||
def compatible_microarchitectures(info):
|
||||
"""Returns an unordered list of known micro-architectures that are
|
||||
compatible with the info dictionary passed as argument.
|
||||
|
||||
Args:
|
||||
info (dict): dictionary containing information on the host cpu
|
||||
def compatible_microarchitectures(info: Microarchitecture) -> List[Microarchitecture]:
|
||||
"""Returns an unordered list of known micro-architectures that are compatible with the
|
||||
partial Microarchitecture passed as input.
|
||||
"""
|
||||
architecture_family = _machine()
|
||||
# If a tester is not registered, be conservative and assume no known
|
||||
# target is compatible with the host
|
||||
# If a tester is not registered, assume no known target is compatible with the host
|
||||
tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
|
||||
return [x for x in TARGETS.values() if tester(info, x)] or [
|
||||
generic_microarchitecture(architecture_family)
|
||||
@@ -230,8 +306,8 @@ def compatible_microarchitectures(info):
|
||||
|
||||
def host():
|
||||
"""Detects the host micro-architecture and returns it."""
|
||||
# Retrieve a dictionary with raw information on the host's cpu
|
||||
info = raw_info_dictionary()
|
||||
# Retrieve information on the host's cpu
|
||||
info = detected_info()
|
||||
|
||||
# Get a list of possible candidates for this micro-architecture
|
||||
candidates = compatible_microarchitectures(info)
|
||||
@@ -258,16 +334,15 @@ def sorting_fn(item):
|
||||
return max(candidates, key=sorting_fn)
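
Putting the pipeline together, a short usage sketch of the public entry point defined above; the printed values are machine-dependent and shown only as examples:

import archspec.cpu

uarch = archspec.cpu.host()       # detect, then pick the best compatible target
print(uarch.name)                 # e.g. "skylake"
print(uarch.family)               # e.g. "x86_64"
print("avx2" in uarch.features)   # feature queries on the detected target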
|
||||
|
||||
|
||||
def compatibility_check(architecture_family):
|
||||
def compatibility_check(architecture_family: Union[str, Tuple[str, ...]]):
|
||||
"""Decorator to register a function as a proper compatibility check.
|
||||
|
||||
A compatibility check function takes the raw info dictionary as a first
|
||||
argument and an arbitrary target as the second argument. It returns True
|
||||
if the target is compatible with the info dictionary, False otherwise.
|
||||
A compatibility check function takes a partial Microarchitecture object as a first argument,
and an arbitrary target Microarchitecture as the second argument. It returns True if the
target is compatible with the first argument, False otherwise.
|
||||
|
||||
Args:
|
||||
architecture_family (str or tuple): architecture family for which
|
||||
this test can be used, e.g. x86_64 or ppc64le etc.
|
||||
architecture_family: architecture family for which this test can be used
|
||||
"""
|
||||
# Turn the argument into something iterable
|
||||
if isinstance(architecture_family, str):
|
||||
@@ -280,86 +355,57 @@ def decorator(func):
|
||||
return decorator
|
||||
|
||||
|
||||
@compatibility_check(architecture_family=("ppc64le", "ppc64"))
|
||||
@compatibility_check(architecture_family=(PPC64LE, PPC64))
|
||||
def compatibility_check_for_power(info, target):
|
||||
"""Compatibility check for PPC64 and PPC64LE architectures."""
|
||||
basename = platform.machine()
|
||||
generation_match = re.search(r"POWER(\d+)", info.get("cpu", ""))
|
||||
try:
|
||||
generation = int(generation_match.group(1))
|
||||
except AttributeError:
|
||||
# There might be no match under emulated environments. For instance
|
||||
# emulating a ppc64le with QEMU and Docker still reports the host
|
||||
# /proc/cpuinfo and not a Power
|
||||
generation = 0
|
||||
|
||||
# We can use a target if it descends from our machine type and our
|
||||
# generation (9 for POWER9, etc) is at least its generation.
|
||||
arch_root = TARGETS[basename]
|
||||
arch_root = TARGETS[_machine()]
|
||||
return (
|
||||
target == arch_root or arch_root in target.ancestors
|
||||
) and target.generation <= generation
|
||||
) and target.generation <= info.generation
|
||||
|
||||
|
||||
@compatibility_check(architecture_family="x86_64")
|
||||
@compatibility_check(architecture_family=X86_64)
|
||||
def compatibility_check_for_x86_64(info, target):
|
||||
"""Compatibility check for x86_64 architectures."""
|
||||
basename = "x86_64"
|
||||
vendor = info.get("vendor_id", "generic")
|
||||
features = set(info.get("flags", "").split())
|
||||
|
||||
# We can use a target if it descends from our machine type, is from our
|
||||
# vendor, and we have all of its features
|
||||
arch_root = TARGETS[basename]
|
||||
arch_root = TARGETS[X86_64]
|
||||
return (
|
||||
(target == arch_root or arch_root in target.ancestors)
|
||||
and target.vendor in (vendor, "generic")
|
||||
and target.features.issubset(features)
|
||||
and target.vendor in (info.vendor, "generic")
|
||||
and target.features.issubset(info.features)
|
||||
)
|
||||
|
||||
|
||||
@compatibility_check(architecture_family="aarch64")
|
||||
@compatibility_check(architecture_family=AARCH64)
|
||||
def compatibility_check_for_aarch64(info, target):
|
||||
"""Compatibility check for AARCH64 architectures."""
|
||||
basename = "aarch64"
|
||||
features = set(info.get("Features", "").split())
|
||||
vendor = info.get("CPU implementer", "generic")
|
||||
|
||||
# At the moment it's not clear how to detect compatibility with
|
||||
# At the moment, it's not clear how to detect compatibility with
|
||||
# a specific version of the architecture
|
||||
if target.vendor == "generic" and target.name != "aarch64":
|
||||
if target.vendor == "generic" and target.name != AARCH64:
|
||||
return False
|
||||
|
||||
arch_root = TARGETS[basename]
|
||||
arch_root = TARGETS[AARCH64]
|
||||
arch_root_and_vendor = arch_root == target.family and target.vendor in (
|
||||
vendor,
|
||||
info.vendor,
|
||||
"generic",
|
||||
)
|
||||
|
||||
# On macOS it seems impossible to get all the CPU features
|
||||
# with syctl info, but for ARM we can get the exact model
|
||||
if platform.system() == "Darwin":
|
||||
model_key = info.get("model", basename)
|
||||
model = TARGETS[model_key]
|
||||
model = TARGETS[info.name]
|
||||
return arch_root_and_vendor and (target == model or target in model.ancestors)
|
||||
|
||||
return arch_root_and_vendor and target.features.issubset(features)
|
||||
return arch_root_and_vendor and target.features.issubset(info.features)
|
||||
|
||||
|
||||
@compatibility_check(architecture_family="riscv64")
|
||||
@compatibility_check(architecture_family=RISCV64)
|
||||
def compatibility_check_for_riscv64(info, target):
|
||||
"""Compatibility check for riscv64 architectures."""
|
||||
basename = "riscv64"
|
||||
uarch = info.get("uarch")
|
||||
|
||||
# sifive unmatched board
|
||||
if uarch == "sifive,u74-mc":
|
||||
uarch = "u74mc"
|
||||
# catch-all for unknown uarchs
|
||||
else:
|
||||
uarch = "riscv64"
|
||||
|
||||
arch_root = TARGETS[basename]
|
||||
arch_root = TARGETS[RISCV64]
|
||||
return (target == arch_root or arch_root in target.ancestors) and (
|
||||
target == uarch or target.vendor == "generic"
|
||||
target.name == info.name or target.vendor == "generic"
|
||||
)
|
||||
|
@@ -13,6 +13,7 @@
|
||||
import archspec
|
||||
import archspec.cpu.alias
|
||||
import archspec.cpu.schema
|
||||
|
||||
from .alias import FEATURE_ALIASES
|
||||
from .schema import LazyDictionary
|
||||
|
||||
@@ -47,7 +48,7 @@ class Microarchitecture:
|
||||
which has "broadwell" as a parent, supports running binaries
|
||||
optimized for "broadwell".
|
||||
vendor (str): vendor of the micro-architecture
|
||||
features (list of str): supported CPU flags. Note that the semantic
|
||||
features (set of str): supported CPU flags. Note that the semantic
|
||||
of the flags in this field might vary among architectures, if
|
||||
at all present. For instance x86_64 processors will list all
|
||||
the flags supported by a given CPU while Arm processors will
|
||||
@@ -180,24 +181,28 @@ def generic(self):
|
||||
generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
|
||||
return max(generics, key=lambda x: len(x.ancestors))
|
||||
|
||||
def to_dict(self, return_list_of_items=False):
|
||||
"""Returns a dictionary representation of this object.
|
||||
def to_dict(self):
|
||||
"""Returns a dictionary representation of this object."""
|
||||
return {
|
||||
"name": str(self.name),
|
||||
"vendor": str(self.vendor),
|
||||
"features": sorted(str(x) for x in self.features),
|
||||
"generation": self.generation,
|
||||
"parents": [str(x) for x in self.parents],
|
||||
"compilers": self.compilers,
|
||||
}
|
||||
|
||||
Args:
|
||||
return_list_of_items (bool): if True returns an ordered list of
|
||||
items instead of the dictionary
|
||||
"""
|
||||
list_of_items = [
|
||||
("name", str(self.name)),
|
||||
("vendor", str(self.vendor)),
|
||||
("features", sorted(str(x) for x in self.features)),
|
||||
("generation", self.generation),
|
||||
("parents", [str(x) for x in self.parents]),
|
||||
]
|
||||
if return_list_of_items:
|
||||
return list_of_items
|
||||
|
||||
return dict(list_of_items)
|
||||
@staticmethod
|
||||
def from_dict(data) -> "Microarchitecture":
|
||||
"""Construct a microarchitecture from a dictionary representation."""
|
||||
return Microarchitecture(
|
||||
name=data["name"],
|
||||
parents=[TARGETS[x] for x in data["parents"]],
|
||||
vendor=data["vendor"],
|
||||
features=set(data["features"]),
|
||||
compilers=data.get("compilers", {}),
|
||||
generation=data.get("generation", 0),
|
||||
)
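
A hedged round-trip sketch for the serialization pair above, assuming the ``TARGETS`` dictionary and the equality semantics of ``Microarchitecture``; the target name is just a convenient known entry:

from archspec.cpu.microarchitecture import TARGETS, Microarchitecture

original = TARGETS["x86_64"]                       # any known target
clone = Microarchitecture.from_dict(original.to_dict())
assert clone == original                           # dict round-trip preserves the target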
|
||||
|
||||
def optimization_flags(self, compiler, version):
|
||||
"""Returns a string containing the optimization flags that needs
|
||||
@@ -271,9 +276,7 @@ def tuplify(ver):
|
||||
flags = flags_fmt.format(**compiler_entry)
|
||||
return flags
|
||||
|
||||
msg = (
|
||||
"cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
|
||||
)
|
||||
msg = "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
|
||||
if compiler_info:
|
||||
versions = [x["versions"] for x in compiler_info]
|
||||
msg += f' [supported compiler versions are {", ".join(versions)}]'
|
||||
@@ -289,9 +292,7 @@ def generic_microarchitecture(name):
|
||||
Args:
|
||||
name (str): name of the micro-architecture
|
||||
"""
|
||||
return Microarchitecture(
|
||||
name, parents=[], vendor="generic", features=[], compilers={}
|
||||
)
|
||||
return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
|
||||
|
||||
|
||||
def version_components(version):
|
||||
@@ -345,9 +346,7 @@ def fill_target_from_dict(name, data, targets):
|
||||
compilers = values.get("compilers", {})
|
||||
generation = values.get("generation", 0)
|
||||
|
||||
targets[name] = Microarchitecture(
|
||||
name, parents, vendor, features, compilers, generation
|
||||
)
|
||||
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
|
||||
|
||||
known_targets = {}
|
||||
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
|
||||
|
68
lib/spack/external/archspec/cpu/schema.py
vendored
@@ -7,7 +7,9 @@
|
||||
"""
|
||||
import collections.abc
|
||||
import json
|
||||
import os.path
|
||||
import os
|
||||
import pathlib
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
class LazyDictionary(collections.abc.MutableMapping):
|
||||
@@ -46,21 +48,65 @@ def __len__(self):
|
||||
return len(self.data)
|
||||
|
||||
|
||||
def _load_json_file(json_file):
|
||||
json_dir = os.path.join(os.path.dirname(__file__), "..", "json", "cpu")
|
||||
json_dir = os.path.abspath(json_dir)
|
||||
#: Environment variable that might point to a directory with a user defined JSON file
|
||||
DIR_FROM_ENVIRONMENT = "ARCHSPEC_CPU_DIR"
|
||||
|
||||
def _factory():
|
||||
filename = os.path.join(json_dir, json_file)
|
||||
with open(filename, "r", encoding="utf-8") as file:
|
||||
return json.load(file)
|
||||
#: Environment variable that might point to a directory with extensions to JSON files
|
||||
EXTENSION_DIR_FROM_ENVIRONMENT = "ARCHSPEC_EXTENSION_CPU_DIR"
|
||||
|
||||
return _factory
|
||||
|
||||
def _json_file(filename: str, allow_custom: bool = False) -> Tuple[pathlib.Path, pathlib.Path]:
|
||||
"""Given a filename, returns the absolute path for the main JSON file, and an
|
||||
optional absolute path for an extension JSON file.
|
||||
|
||||
Args:
|
||||
filename: filename for the JSON file
|
||||
allow_custom: if True, allows overriding the location where the file resides
|
||||
"""
|
||||
json_dir = pathlib.Path(__file__).parent / ".." / "json" / "cpu"
|
||||
if allow_custom and DIR_FROM_ENVIRONMENT in os.environ:
|
||||
json_dir = pathlib.Path(os.environ[DIR_FROM_ENVIRONMENT])
|
||||
json_dir = json_dir.absolute()
|
||||
json_file = json_dir / filename
|
||||
|
||||
extension_file = None
|
||||
if allow_custom and EXTENSION_DIR_FROM_ENVIRONMENT in os.environ:
|
||||
extension_dir = pathlib.Path(os.environ[EXTENSION_DIR_FROM_ENVIRONMENT])
|
||||
extension_dir = extension_dir.absolute()
|
||||
extension_file = extension_dir / filename
|
||||
|
||||
return json_file, extension_file
|
||||
|
||||
|
||||
def _load(json_file: pathlib.Path, extension_file: pathlib.Path):
|
||||
with open(json_file, "r", encoding="utf-8") as file:
|
||||
data = json.load(file)
|
||||
|
||||
if not extension_file or not extension_file.exists():
|
||||
return data
|
||||
|
||||
with open(extension_file, "r", encoding="utf-8") as file:
|
||||
extension_data = json.load(file)
|
||||
|
||||
top_level_sections = list(data.keys())
|
||||
for key in top_level_sections:
|
||||
if key not in extension_data:
|
||||
continue
|
||||
|
||||
data[key].update(extension_data[key])
|
||||
|
||||
return data
|
||||
|
||||
|
||||
#: In memory representation of the data in microarchitectures.json,
|
||||
#: loaded on first access
|
||||
TARGETS_JSON = LazyDictionary(_load_json_file("microarchitectures.json"))
|
||||
TARGETS_JSON = LazyDictionary(_load, *_json_file("microarchitectures.json", allow_custom=True))
|
||||
|
||||
#: JSON schema for microarchitectures.json, loaded on first access
|
||||
SCHEMA = LazyDictionary(_load_json_file("microarchitectures_schema.json"))
|
||||
TARGETS_JSON_SCHEMA = LazyDictionary(_load, *_json_file("microarchitectures_schema.json"))
|
||||
|
||||
#: Information on how to call 'cpuid' to get information on the HOST CPU
|
||||
CPUID_JSON = LazyDictionary(_load, *_json_file("cpuid.json", allow_custom=True))
|
||||
|
||||
#: JSON schema for cpuid.json, loaded on first access
|
||||
CPUID_JSON_SCHEMA = LazyDictionary(_load, *_json_file("cpuid_schema.json"))
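
As ``_load`` above shows, extension files are merged per top-level key into the bundled data. A hedged sketch of how a user could wire in an extension directory; the environment variable name comes from the code above, while the path and target name are hypothetical:

import os

# Must be set before TARGETS_JSON is first accessed: LazyDictionary
# defers loading until first use.
os.environ["ARCHSPEC_EXTENSION_CPU_DIR"] = "/path/to/my/json"  # hypothetical directory

import archspec.cpu
print("mycustomtarget" in archspec.cpu.TARGETS)  # True if the extension defines it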
|
||||
|
10
lib/spack/external/archspec/json/README.md
vendored
@@ -9,11 +9,11 @@ language specific APIs.

Currently the repository contains the following JSON files:
```console
.
├── COPYRIGHT
└── cpu
    ├── microarchitectures.json # Contains information on CPU microarchitectures
    └── microarchitectures_schema.json # Schema for the file above
cpu/
├── cpuid.json # Contains information on CPUID calls to retrieve vendor and features on x86_64
├── cpuid_schema.json # Schema for the file above
├── microarchitectures.json # Contains information on CPU microarchitectures
└── microarchitectures_schema.json # Schema for the file above
```
|
||||
|
||||
|
1050
lib/spack/external/archspec/json/cpu/cpuid.json
vendored
Normal file
File diff suppressed because it is too large
134
lib/spack/external/archspec/json/cpu/cpuid_schema.json
vendored
Normal file
@@ -0,0 +1,134 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Schema for microarchitecture definitions and feature aliases",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"vendor": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"input": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"eax": {
|
||||
"type": "integer"
|
||||
},
|
||||
"ecx": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"highest_extension_support": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"input": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"eax": {
|
||||
"type": "integer"
|
||||
},
|
||||
"ecx": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"flags": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"input": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"eax": {
|
||||
"type": "integer"
|
||||
},
|
||||
"ecx": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"bits": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"register": {
|
||||
"type": "string"
|
||||
},
|
||||
"bit": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"extension-flags": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"input": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"eax": {
|
||||
"type": "integer"
|
||||
},
|
||||
"ecx": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
},
|
||||
"bits": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"register": {
|
||||
"type": "string"
|
||||
},
|
||||
"bit": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
20
lib/spack/external/archspec/vendor/cpuid/LICENSE
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Anders Høst
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
76
lib/spack/external/archspec/vendor/cpuid/README.md
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
cpuid.py
|
||||
========
|
||||
|
||||
Now, this is silly!
|
||||
|
||||
Pure Python library for accessing information about x86 processors
|
||||
by querying the [CPUID](http://en.wikipedia.org/wiki/CPUID)
|
||||
instruction. Well, not exactly pure Python...
|
||||
|
||||
It works by allocating a small piece of virtual memory, copying
a raw x86 function to that memory, giving the memory execute
permissions and then calling the memory as a function. The injected
function executes the CPUID instruction and copies the result back
to a ctypes.Structure where it can be read by Python.
|
||||
|
||||
It should work fine on both 32 and 64 bit versions of Windows and Linux
|
||||
running x86 processors. Apple OS X and other BSD systems should also work,
|
||||
not tested though...
|
||||
|
||||
|
||||
Why?
|
||||
----
|
||||
For poops and giggles. Plus, having access to a low-level feature
|
||||
without having to compile a C wrapper is pretty neat.
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
Getting info with eax=0:
|
||||
|
||||
import cpuid
|
||||
|
||||
q = cpuid.CPUID()
|
||||
eax, ebx, ecx, edx = q(0)
|
||||
|
||||
Running the files:
|
||||
|
||||
$ python example.py
|
||||
Vendor ID : GenuineIntel
|
||||
CPU name : Intel(R) Xeon(R) CPU W3550 @ 3.07GHz
|
||||
|
||||
Vector instructions supported:
|
||||
SSE : Yes
|
||||
SSE2 : Yes
|
||||
SSE3 : Yes
|
||||
SSSE3 : Yes
|
||||
SSE4.1 : Yes
|
||||
SSE4.2 : Yes
|
||||
SSE4a : --
|
||||
AVX : --
|
||||
AVX2 : --
|
||||
|
||||
$ python cpuid.py
|
||||
CPUID A B C D
|
||||
00000000 0000000b 756e6547 6c65746e 49656e69
|
||||
00000001 000106a5 00100800 009ce3bd bfebfbff
|
||||
00000002 55035a01 00f0b2e4 00000000 09ca212c
|
||||
00000003 00000000 00000000 00000000 00000000
|
||||
00000004 00000000 00000000 00000000 00000000
|
||||
00000005 00000040 00000040 00000003 00001120
|
||||
00000006 00000003 00000002 00000001 00000000
|
||||
00000007 00000000 00000000 00000000 00000000
|
||||
00000008 00000000 00000000 00000000 00000000
|
||||
00000009 00000000 00000000 00000000 00000000
|
||||
0000000a 07300403 00000044 00000000 00000603
|
||||
0000000b 00000000 00000000 00000095 00000000
|
||||
80000000 80000008 00000000 00000000 00000000
|
||||
80000001 00000000 00000000 00000001 28100800
|
||||
80000002 65746e49 2952286c 6f655820 2952286e
|
||||
80000003 55504320 20202020 20202020 57202020
|
||||
80000004 30353533 20402020 37302e33 007a4847
|
||||
80000005 00000000 00000000 00000000 00000000
|
||||
80000006 00000000 00000000 01006040 00000000
|
||||
80000007 00000000 00000000 00000000 00000100
|
||||
80000008 00003024 00000000 00000000 00000000
|
||||
|
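Beyond the eax-only call shown above, the vendored class (defined in cpuid.py below) also accepts a subleaf in ecx; a short usage sketch, assuming the module is importable as cpuid as in the examples above:

    import cpuid

    q = cpuid.CPUID()
    # Leaf 7 (structured extended features) takes a subleaf in ecx; 0 here.
    regs = q.registers_for(eax=7, ecx=0)
    has_avx2 = bool(regs.ebx & (1 << 5))  # same bit example.py tests via is_set()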
lib/spack/external/archspec/vendor/cpuid/cpuid.py (new vendored file, 172 lines)
@@ -0,0 +1,172 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2024 Anders Høst
#

from __future__ import print_function

import platform
import os
import ctypes
from ctypes import c_uint32, c_long, c_ulong, c_size_t, c_void_p, POINTER, CFUNCTYPE

# Posix x86_64:
# Three first call registers : RDI, RSI, RDX
# Volatile registers         : RAX, RCX, RDX, RSI, RDI, R8-11

# Windows x86_64:
# Three first call registers : RCX, RDX, R8
# Volatile registers         : RAX, RCX, RDX, R8-11

# cdecl 32 bit:
# Three first call registers : Stack (%esp)
# Volatile registers         : EAX, ECX, EDX

_POSIX_64_OPC = [
    0x53,              # push   %rbx
    0x89, 0xf0,        # mov    %esi,%eax
    0x89, 0xd1,        # mov    %edx,%ecx
    0x0f, 0xa2,        # cpuid
    0x89, 0x07,        # mov    %eax,(%rdi)
    0x89, 0x5f, 0x04,  # mov    %ebx,0x4(%rdi)
    0x89, 0x4f, 0x08,  # mov    %ecx,0x8(%rdi)
    0x89, 0x57, 0x0c,  # mov    %edx,0xc(%rdi)
    0x5b,              # pop    %rbx
    0xc3,              # retq
]

_WINDOWS_64_OPC = [
    0x53,                    # push   %rbx
    0x89, 0xd0,              # mov    %edx,%eax
    0x49, 0x89, 0xc9,        # mov    %rcx,%r9
    0x44, 0x89, 0xc1,        # mov    %r8d,%ecx
    0x0f, 0xa2,              # cpuid
    0x41, 0x89, 0x01,        # mov    %eax,(%r9)
    0x41, 0x89, 0x59, 0x04,  # mov    %ebx,0x4(%r9)
    0x41, 0x89, 0x49, 0x08,  # mov    %ecx,0x8(%r9)
    0x41, 0x89, 0x51, 0x0c,  # mov    %edx,0xc(%r9)
    0x5b,                    # pop    %rbx
    0xc3,                    # retq
]

_CDECL_32_OPC = [
    0x53,                    # push   %ebx
    0x57,                    # push   %edi
    0x8b, 0x7c, 0x24, 0x0c,  # mov    0xc(%esp),%edi
    0x8b, 0x44, 0x24, 0x10,  # mov    0x10(%esp),%eax
    0x8b, 0x4c, 0x24, 0x14,  # mov    0x14(%esp),%ecx
    0x0f, 0xa2,              # cpuid
    0x89, 0x07,              # mov    %eax,(%edi)
    0x89, 0x5f, 0x04,        # mov    %ebx,0x4(%edi)
    0x89, 0x4f, 0x08,        # mov    %ecx,0x8(%edi)
    0x89, 0x57, 0x0c,        # mov    %edx,0xc(%edi)
    0x5f,                    # pop    %edi
    0x5b,                    # pop    %ebx
    0xc3,                    # ret
]

is_windows = os.name == "nt"
is_64bit = ctypes.sizeof(ctypes.c_voidp) == 8


class CPUID_struct(ctypes.Structure):
    _register_names = ("eax", "ebx", "ecx", "edx")
    _fields_ = [(r, c_uint32) for r in _register_names]

    def __getitem__(self, item):
        if item not in self._register_names:
            raise KeyError(item)
        return getattr(self, item)

    def __repr__(self):
        return "eax=0x{:x}, ebx=0x{:x}, ecx=0x{:x}, edx=0x{:x}".format(self.eax, self.ebx, self.ecx, self.edx)


class CPUID(object):
    def __init__(self):
        if platform.machine() not in ("AMD64", "x86_64", "x86", "i686"):
            raise SystemError("Only available for x86")

        if is_windows:
            if is_64bit:
                # VirtualAlloc seems to fail under some weird
                # circumstances when ctypes.windll.kernel32 is
                # used under 64 bit Python. CDLL fixes this.
                self.win = ctypes.CDLL("kernel32.dll")
                opc = _WINDOWS_64_OPC
            else:
                # Here ctypes.windll.kernel32 is needed to get the
                # right DLL. Otherwise it will fail when running
                # 32 bit Python on 64 bit Windows.
                self.win = ctypes.windll.kernel32
                opc = _CDECL_32_OPC
        else:
            opc = _POSIX_64_OPC if is_64bit else _CDECL_32_OPC

        size = len(opc)
        code = (ctypes.c_ubyte * size)(*opc)

        if is_windows:
            self.win.VirtualAlloc.restype = c_void_p
            self.win.VirtualAlloc.argtypes = [ctypes.c_void_p, ctypes.c_size_t, ctypes.c_ulong, ctypes.c_ulong]
            self.addr = self.win.VirtualAlloc(None, size, 0x1000, 0x40)
            if not self.addr:
                raise MemoryError("Could not allocate RWX memory")
            ctypes.memmove(self.addr, code, size)
        else:
            from mmap import (
                mmap,
                MAP_PRIVATE,
                MAP_ANONYMOUS,
                PROT_WRITE,
                PROT_READ,
                PROT_EXEC,
            )

            self.mm = mmap(
                -1,
                size,
                flags=MAP_PRIVATE | MAP_ANONYMOUS,
                prot=PROT_WRITE | PROT_READ | PROT_EXEC,
            )
            self.mm.write(code)
            self.addr = ctypes.addressof(ctypes.c_int.from_buffer(self.mm))

        func_type = CFUNCTYPE(None, POINTER(CPUID_struct), c_uint32, c_uint32)
        self.func_ptr = func_type(self.addr)

    def __call__(self, eax, ecx=0):
        struct = self.registers_for(eax=eax, ecx=ecx)
        return struct.eax, struct.ebx, struct.ecx, struct.edx

    def registers_for(self, eax, ecx=0):
        """Calls cpuid with eax and ecx set as the input arguments, and returns a structure
        containing eax, ebx, ecx, and edx.
        """
        struct = CPUID_struct()
        self.func_ptr(struct, eax, ecx)
        return struct

    def __del__(self):
        if is_windows:
            self.win.VirtualFree.restype = c_long
            self.win.VirtualFree.argtypes = [c_void_p, c_size_t, c_ulong]
            self.win.VirtualFree(self.addr, 0, 0x8000)
        else:
            self.mm.close()


if __name__ == "__main__":

    def valid_inputs():
        cpuid = CPUID()
        for eax in (0x0, 0x80000000):
            highest, _, _, _ = cpuid(eax)
            while eax <= highest:
                regs = cpuid(eax)
                yield (eax, regs)
                eax += 1

    print(" ".join(x.ljust(8) for x in ("CPUID", "A", "B", "C", "D")).strip())
    for eax, regs in valid_inputs():
        print("%08x" % eax, " ".join("%08x" % reg for reg in regs))
lib/spack/external/archspec/vendor/cpuid/example.py (new vendored file, 62 lines)
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2024 Anders Høst
#

from __future__ import print_function

import struct
import cpuid


def cpu_vendor(cpu):
    _, b, c, d = cpu(0)
    return struct.pack("III", b, d, c).decode("utf-8")


def cpu_name(cpu):
    name = "".join((struct.pack("IIII", *cpu(0x80000000 + i)).decode("utf-8")
                    for i in range(2, 5)))

    return name.split('\x00', 1)[0]


def is_set(cpu, leaf, subleaf, reg_idx, bit):
    """
    @param {leaf} %eax
    @param {subleaf} %ecx, 0 in most cases
    @param {reg_idx} idx of [%eax, %ebx, %ecx, %edx], 0-based
    @param {bit} bit of reg selected by {reg_idx}, 0-based
    """

    regs = cpu(leaf, subleaf)

    if (1 << bit) & regs[reg_idx]:
        return "Yes"
    else:
        return "--"


if __name__ == "__main__":
    cpu = cpuid.CPUID()

    print("Vendor ID  : %s" % cpu_vendor(cpu))
    print("CPU name   : %s" % cpu_name(cpu))
    print()
    print("Vector instructions supported:")
    print("SSE       : %s" % is_set(cpu, 1, 0, 3, 25))
    print("SSE2      : %s" % is_set(cpu, 1, 0, 3, 26))
    print("SSE3      : %s" % is_set(cpu, 1, 0, 2, 0))
    print("SSSE3     : %s" % is_set(cpu, 1, 0, 2, 9))
    print("SSE4.1    : %s" % is_set(cpu, 1, 0, 2, 19))
    print("SSE4.2    : %s" % is_set(cpu, 1, 0, 2, 20))
    print("SSE4a     : %s" % is_set(cpu, 0x80000001, 0, 2, 6))
    print("AVX       : %s" % is_set(cpu, 1, 0, 2, 28))
    print("AVX2      : %s" % is_set(cpu, 7, 0, 1, 5))
    print("BMI1      : %s" % is_set(cpu, 7, 0, 1, 3))
    print("BMI2      : %s" % is_set(cpu, 7, 0, 1, 8))
    # Intel RDT CMT/MBM
    print("L3 Monitoring : %s" % is_set(cpu, 0xf, 0, 3, 1))
    print("L3 Occupancy  : %s" % is_set(cpu, 0xf, 1, 3, 0))
    print("L3 Total BW   : %s" % is_set(cpu, 0xf, 1, 3, 1))
    print("L3 Local BW   : %s" % is_set(cpu, 0xf, 1, 3, 2))
lib/spack/external/patches/ruamelyaml.patch (new vendored file, 13 lines)
@@ -0,0 +1,13 @@
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
                    Tag.attrib, merge_attrib]:
             if hasattr(self, a):
                 if memo is not None:
-                    setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+                    setattr(t, a, copy.deepcopy(getattr(self, a), memo))
                 else:
                     setattr(t, a, getattr(self, a))
     # fmt: on
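The one-character move in this patch matters: in the original line, memo is consumed as the *default* argument of getattr, so copy.deepcopy never receives the memo dictionary and cannot track already-copied objects. A minimal illustration of the two call shapes:

    import copy

    class Node:
        pass

    src = Node()
    src.data = [1, 2, 3]
    memo = {}

    # Buggy shape: memo lands in getattr's default slot; deepcopy gets no memo.
    buggy = copy.deepcopy(getattr(src, "data", memo))

    # Fixed shape: fetch the attribute, then pass memo to deepcopy for alias tracking.
    fixed = copy.deepcopy(getattr(src, "data"), memo)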
@@ -42,11 +42,6 @@ def convert_to_posix_path(path: str) -> str:
     return format_os_path(path, mode=Path.unix)
 
 
-def convert_to_windows_path(path: str) -> str:
-    """Converts the input path to Windows style."""
-    return format_os_path(path, mode=Path.windows)
-
-
 def convert_to_platform_path(path: str) -> str:
     """Converts the input path to the current platform's native style."""
     return format_os_path(path, mode=Path.platform_path)
@@ -12,7 +12,7 @@
 # Archive extensions allowed in Spack
 PREFIX_EXTENSIONS = ("tar", "TAR")
 EXTENSIONS = ("gz", "bz2", "xz", "Z")
-NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
+NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")
 
 # Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
 ALLOWED_ARCHIVE_TYPES = (
@@ -357,10 +357,8 @@ def strip_version_suffixes(path_or_url: str) -> str:
         r"i[36]86",
         r"ppc64(le)?",
         r"armv?(7l|6l|64)?",
-        # PyPI
-        r"[._-]py[23].*\.whl",
-        r"[._-]cp[23].*\.whl",
-        r"[._-]win.*\.exe",
+        # PyPI wheels
+        r"-(?:py|cp)[23].*",
     ]
 
     for regex in suffix_regexes:
@@ -403,7 +401,7 @@ def expand_contracted_extension_in_path(
 def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
     """Returns compression extension for a compressed archive"""
     extension = expand_contracted_extension(extension)
-    for ext in [*EXTENSIONS]:
+    for ext in EXTENSIONS:
         if ext in extension:
             return ext
     return None
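Illustrative behavior of the simplified loop, assuming expand_contracted_extension maps a contracted suffix like "tgz" to "tar.gz" (an assumption about that helper, which is not shown in this diff):

    # "tgz" expands to "tar.gz"; "gz" is the first EXTENSIONS member found in it.
    assert compression_ext_from_compressed_archive("tgz") == "gz"
    # "zip" contains no member of EXTENSIONS, so nothing is returned.
    assert compression_ext_from_compressed_archive("zip") is None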
@@ -198,15 +198,32 @@ def getuid():
         return os.getuid()
 
 
+def _win_rename(src, dst):
+    # os.replace will still fail if on Windows (but not POSIX) if the dst
+    # is a symlink to a directory (all other cases have parity Windows <-> Posix)
+    if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
+        if os.path.samefile(src, dst):
+            # src and dst are the same
+            # do nothing and exit early
+            return
+        # If dst exists and is a symlink to a directory
+        # we need to remove dst and then perform rename/replace
+        # this is safe to do as there's no chance src == dst now
+        os.remove(dst)
+    os.replace(src, dst)
+
+
 @system_path_filter
 def rename(src, dst):
-    # On Windows, os.rename will fail if the destination file already exists
+    # os.replace is the same as os.rename on POSIX and is MoveFileExW w/
+    # the MOVEFILE_REPLACE_EXISTING flag on Windows
+    # Windows invocation is abstracted behind additional logic handling
+    # remaining cases of divergent behavior across platforms
     if sys.platform == "win32":
-        # Windows path existence checks will sometimes fail on junctions/links/symlinks
-        # so check for that case
-        if os.path.exists(dst) or islink(dst):
-            os.remove(dst)
-        os.rename(src, dst)
+        _win_rename(src, dst)
     else:
         os.replace(src, dst)
@@ -237,16 +254,6 @@ def _get_mime_type():
     return file_command("-b", "-h", "--mime-type")
 
 
-@memoized
-def _get_mime_type_compressed():
-    """Same as _get_mime_type but attempts to check for
-    compression first
-    """
-    mime_uncompressed = _get_mime_type()
-    mime_uncompressed.add_default_arg("-Z")
-    return mime_uncompressed
-
-
 def mime_type(filename):
     """Returns the mime type and subtype of a file.
 
@@ -262,21 +269,6 @@ def mime_type(filename):
     return type, subtype
 
 
-def compressed_mime_type(filename):
-    """Same as mime_type but checks for type that has been compressed
-
-    Args:
-        filename (str): file to be analyzed
-
-    Returns:
-        Tuple containing the MIME type and subtype
-    """
-    output = _get_mime_type_compressed()(filename, output=str, error=str).strip()
-    tty.debug("==> " + output)
-    type, _, subtype = output.partition("/")
-    return type, subtype
-
-
 #: This generates the library filenames that may appear on any OS.
 library_extensions = ["a", "la", "so", "tbd", "dylib"]
 
@@ -308,13 +300,6 @@ def paths_containing_libs(paths, library_names):
     return rpaths_to_include
 
 
-@system_path_filter
-def same_path(path1, path2):
-    norm1 = os.path.abspath(path1).rstrip(os.path.sep)
-    norm2 = os.path.abspath(path2).rstrip(os.path.sep)
-    return norm1 == norm2
-
-
 def filter_file(
     regex: str,
     repl: Union[str, Callable[[Match], str]],
@@ -909,17 +894,6 @@ def is_exe(path):
     return os.path.isfile(path) and os.access(path, os.X_OK)
 
 
-@system_path_filter
-def get_filetype(path_name):
-    """
-    Return the output of file path_name as a string to identify file type.
-    """
-    file = Executable("file")
-    file.add_default_env("LC_ALL", "C")
-    output = file("-b", "-h", "%s" % path_name, output=str, error=str)
-    return output.strip()
-
-
 def has_shebang(path):
     """Returns whether a path has a shebang line. Returns False if the file cannot be opened."""
     try:
@@ -1169,20 +1143,6 @@ def write_tmp_and_move(filename):
     shutil.move(tmp, filename)
 
 
-@contextmanager
-@system_path_filter
-def open_if_filename(str_or_file, mode="r"):
-    """Takes either a path or a file object, and opens it if it is a path.
-
-    If it's a file object, just yields the file object.
-    """
-    if isinstance(str_or_file, str):
-        with open(str_or_file, mode) as f:
-            yield f
-    else:
-        yield str_or_file
-
-
 @system_path_filter
 def touch(path):
     """Creates an empty file at the specified path."""
@@ -1274,10 +1234,12 @@ def windows_sfn(path: os.PathLike):
     import ctypes
 
     k32 = ctypes.WinDLL("kernel32", use_last_error=True)
+    # Method with null values returns size of short path name
+    sz = k32.GetShortPathNameW(path, None, 0)
     # stub Windows types TCHAR[LENGTH]
-    TCHAR_arr = ctypes.c_wchar * len(path)
+    TCHAR_arr = ctypes.c_wchar * sz
     ret_str = TCHAR_arr()
-    k32.GetShortPathNameW(path, ret_str, len(path))
+    k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
     return ret_str.value
@@ -1295,19 +1257,6 @@ def temp_cwd():
         shutil.rmtree(tmp_dir, **kwargs)
 
 
-@contextmanager
-@system_path_filter
-def temp_rename(orig_path, temp_path):
-    same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
-    if not same_path:
-        shutil.move(orig_path, temp_path)
-    try:
-        yield
-    finally:
-        if not same_path:
-            shutil.move(temp_path, orig_path)
-
-
 @system_path_filter
 def can_access(file_name):
     """True if we have read/write access to the file."""
@@ -98,36 +98,6 @@ def caller_locals():
         del stack
 
 
-def get_calling_module_name():
-    """Make sure that the caller is a class definition, and return the
-    enclosing module's name.
-    """
-    # Passing zero here skips line context for speed.
-    stack = inspect.stack(0)
-    try:
-        # Make sure locals contain __module__
-        caller_locals = stack[2][0].f_locals
-    finally:
-        del stack
-
-    if "__module__" not in caller_locals:
-        raise RuntimeError(
-            "Must invoke get_calling_module_name() " "from inside a class definition!"
-        )
-
-    module_name = caller_locals["__module__"]
-    base_name = module_name.split(".")[-1]
-    return base_name
-
-
-def attr_required(obj, attr_name):
-    """Ensure that a class has a required attribute."""
-    if not hasattr(obj, attr_name):
-        raise RequiredAttributeError(
-            "No required attribute '%s' in class '%s'" % (attr_name, obj.__class__.__name__)
-        )
-
-
 def attr_setdefault(obj, name, value):
     """Like dict.setdefault, but for objects."""
     if not hasattr(obj, name):
@@ -513,42 +483,6 @@ def copy(self):
         return clone
 
 
-def in_function(function_name):
-    """True if the caller was called from some function with
-    the supplied Name, False otherwise."""
-    stack = inspect.stack()
-    try:
-        for elt in stack[2:]:
-            if elt[3] == function_name:
-                return True
-        return False
-    finally:
-        del stack
-
-
-def check_kwargs(kwargs, fun):
-    """Helper for making functions with kwargs. Checks whether the kwargs
-    are empty after all of them have been popped off. If they're
-    not, raises an error describing which kwargs are invalid.
-
-    Example::
-
-        def foo(self, **kwargs):
-            x = kwargs.pop('x', None)
-            y = kwargs.pop('y', None)
-            z = kwargs.pop('z', None)
-            check_kwargs(kwargs, self.foo)
-
-        # This raises a TypeError:
-        foo(w='bad kwarg')
-    """
-    if kwargs:
-        raise TypeError(
-            "'%s' is an invalid keyword argument for function %s()."
-            % (next(iter(kwargs)), fun.__name__)
-        )
-
-
 def match_predicate(*args):
     """Utility function for making string matching predicates.
 
@@ -764,11 +698,6 @@ def pretty_seconds(seconds):
     return pretty_seconds_formatter(seconds)(seconds)
 
 
-class RequiredAttributeError(ValueError):
-    def __init__(self, message):
-        super().__init__(message)
-
-
 class ObjectWrapper:
     """Base class that wraps an object. Derived classes can add new behavior
     while staying undercover.
@@ -843,6 +772,30 @@ def __repr__(self):
         return repr(self.instance)
 
 
+def get_entry_points(*, group: str):
+    """Wrapper for ``importlib.metadata.entry_points``
+
+    Args:
+        group: entry points to select
+
+    Returns:
+        EntryPoints for ``group`` or empty list if unsupported
+    """
+
+    try:
+        import importlib.metadata  # type: ignore  # novermin
+    except ImportError:
+        return []
+
+    try:
+        return importlib.metadata.entry_points(group=group)
+    except TypeError:
+        # Prior to Python 3.10, entry_points accepted no parameters and always
+        # returned a dictionary of entry points, keyed by group. See
+        # https://docs.python.org/3/library/importlib.metadata.html#entry-points
+        return importlib.metadata.entry_points().get(group, [])
+
+
 def load_module_from_file(module_name, module_path):
     """Loads a python module from the path of the corresponding file.
 
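A sketch of how such a wrapper is typically consumed; the group name below is hypothetical, not one this diff defines:

    # Iterate whatever entry points are registered under a group and load them.
    for entry_point in get_entry_points(group="spack.extensions"):
        plugin = entry_point.load()  # resolve and import the referenced object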
@@ -911,25 +864,6 @@ def uniq(sequence):
     return uniq_list
 
 
-def star(func):
-    """Unpacks arguments for use with Multiprocessing mapping functions"""
-
-    def _wrapper(args):
-        return func(*args)
-
-    return _wrapper
-
-
-class Devnull:
-    """Null stream with less overhead than ``os.devnull``.
-
-    See https://stackoverflow.com/a/2929954.
-    """
-
-    def write(self, *_):
-        pass
-
-
 def elide_list(line_list, max_num=10):
     """Takes a long list and limits it to a smaller number of elements,
     replacing intervening elements with '...'. For example::
@@ -815,10 +815,6 @@ def __init__(self, path):
         super().__init__(msg)
 
 
-class LockLimitError(LockError):
-    """Raised when exceed maximum attempts to acquire a lock."""
-
-
 class LockTimeoutError(LockError):
     """Raised when an attempt to acquire a lock times out."""
 
@@ -44,10 +44,6 @@ def is_debug(level=1):
     return _debug >= level
 
 
-def is_stacktrace():
-    return _stacktrace
-
-
 def set_debug(level=0):
     global _debug
     assert level >= 0, "Debug level must be a positive value"
@@ -252,37 +248,6 @@ def die(message, *args, **kwargs) -> NoReturn:
     sys.exit(1)
 
 
-def get_number(prompt, **kwargs):
-    default = kwargs.get("default", None)
-    abort = kwargs.get("abort", None)
-
-    if default is not None and abort is not None:
-        prompt += " (default is %s, %s to abort) " % (default, abort)
-    elif default is not None:
-        prompt += " (default is %s) " % default
-    elif abort is not None:
-        prompt += " (%s to abort) " % abort
-
-    number = None
-    while number is None:
-        msg(prompt, newline=False)
-        ans = input()
-        if ans == str(abort):
-            return None
-
-        if ans:
-            try:
-                number = int(ans)
-                if number < 1:
-                    msg("Please enter a valid number.")
-                    number = None
-            except ValueError:
-                msg("Please enter a valid number.")
-        elif default is not None:
-            number = default
-    return number
-
-
 def get_yes_or_no(prompt, **kwargs):
     default_value = kwargs.get("default", None)
 
@@ -17,7 +17,6 @@
 import tarfile
 import tempfile
 import time
-import traceback
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -111,10 +110,6 @@ def __init__(self, errors):
         super().__init__(self.message)
 
 
-class ListMirrorSpecsError(spack.error.SpackError):
-    """Raised when unable to retrieve list of specs from the mirror"""
-
-
 class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -541,83 +536,6 @@ def binary_index_location():
 BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore
 
 
-class NoOverwriteException(spack.error.SpackError):
-    """Raised when a file would be overwritten"""
-
-    def __init__(self, file_path):
-        super().__init__(f"Refusing to overwrite the following file: {file_path}")
-
-
-class NoGpgException(spack.error.SpackError):
-    """
-    Raised when gpg2 is not in PATH
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class NoKeyException(spack.error.SpackError):
-    """
-    Raised when gpg has no default key added.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class PickKeyException(spack.error.SpackError):
-    """
-    Raised when multiple keys can be used to sign.
-    """
-
-    def __init__(self, keys):
-        err_msg = "Multiple keys available for signing\n%s\n" % keys
-        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super().__init__(err_msg)
-
-
-class NoVerifyException(spack.error.SpackError):
-    """
-    Raised if file fails signature verification.
-    """
-
-    pass
-
-
-class NoChecksumException(spack.error.SpackError):
-    """
-    Raised if file fails checksum verification.
-    """
-
-    def __init__(self, path, size, contents, algorithm, expected, computed):
-        super().__init__(
-            f"{algorithm} checksum failed for {path}",
-            f"Expected {expected} but got {computed}. "
-            f"File size = {size} bytes. Contents = {contents!r}",
-        )
-
-
-class NewLayoutException(spack.error.SpackError):
-    """
-    Raised if directory layout is different from buildcache.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class InvalidMetadataFile(spack.error.SpackError):
-    pass
-
-
-class UnsignedPackageException(spack.error.SpackError):
-    """
-    Raised if installation of unsigned package is attempted without
-    the use of ``--no-check-signature``.
-    """
-
-
 def compute_hash(data):
     if isinstance(data, str):
         data = data.encode("utf-8")
@@ -992,15 +910,10 @@ def url_read_method(url):
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
-    except KeyError as inst:
-        msg = "No packages at {0}: {1}".format(cache_prefix, inst)
-        tty.warn(msg)
     except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
+        # If we got some kind of S3 (access denied or other connection error), the first non
+        # boto-specific class in the exception is Exception. Just print a warning and return
+        tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")
 
     return file_list, read_fn
 
@@ -1047,11 +960,10 @@ def generate_package_index(cache_prefix, concurrency=32):
     """
     try:
        file_list, read_fn = _spec_files_from_cache(cache_prefix)
-    except ListMirrorSpecsError as err:
-        tty.error("Unable to generate package index, {0}".format(err))
-        return
+    except ListMirrorSpecsError as e:
+        raise GenerateIndexError(f"Unable to generate package index: {e}") from e
 
-    tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
+    tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")
 
     tmpdir = tempfile.mkdtemp()
@@ -1061,27 +973,22 @@ def generate_package_index(cache_prefix, concurrency=32):
 
     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
-    except Exception as err:
-        msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
-        tty.debug("\n" + traceback.format_exc())
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing package index to {cache_prefix}: {e}"
+        ) from e
     finally:
-        shutil.rmtree(tmpdir)
+        shutil.rmtree(tmpdir, ignore_errors=True)
 
 
 def generate_key_index(key_prefix, tmpdir=None):
     """Create the key index page.
 
-    Creates (or replaces) the "index.json" page at the location given in
-    key_prefix. This page contains an entry for each key (.pub) under
-    key_prefix.
+    Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
+    contains an entry for each key (.pub) under key_prefix.
     """
-
-    tty.debug(
-        " ".join(
-            ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
-        )
-    )
+    tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")
 
     try:
         fingerprints = (
@@ -1089,17 +996,8 @@ def generate_key_index(key_prefix, tmpdir=None):
             for entry in web_util.list_url(key_prefix, recursive=False)
             if entry.endswith(".pub")
         )
-    except KeyError as inst:
-        msg = "No keys at {0}: {1}".format(key_prefix, inst)
-        tty.warn(msg)
-        return
-    except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
-        return
+    except Exception as e:
+        raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e
 
     remove_tmpdir = False
 
@@ -1124,12 +1022,13 @@ def generate_key_index(key_prefix, tmpdir=None):
             keep_original=False,
             extra_args={"ContentType": "application/json"},
         )
-    except Exception as err:
-        msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing key index to {key_prefix}: {e}"
+        ) from e
     finally:
         if remove_tmpdir:
-            shutil.rmtree(tmpdir)
+            shutil.rmtree(tmpdir, ignore_errors=True)
 
 
 def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
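With warn-and-return replaced by raised exceptions across these hunks, callers of the index generators now choose the failure policy themselves; a sketch using only names introduced in this diff:

    try:
        generate_package_index(cache_prefix)
    except GenerateIndexError as e:
        tty.warn(str(e))  # or re-raise: the policy now lives with the caller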
@@ -1200,7 +1099,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
     used at the mirror (following <tarball_directory_name>).
 
     This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
-    spec.json file already exist in the buildcache.
+    spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
+    when the tarball or spec.json file cannot be pushed to the buildcache.
     """
     if not spec.concrete:
         raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1178,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         key = select_signing_key(options.key)
         sign_specfile(key, options.force, specfile_path)
 
-    # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
-    web_util.push_to_url(
-        signed_specfile_path if not options.unsigned else specfile_path,
-        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
-        keep_original=False,
-    )
+    try:
+        # push tarball and signed spec json to remote mirror
+        web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+        web_util.push_to_url(
+            signed_specfile_path if not options.unsigned else specfile_path,
+            remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
+            keep_original=False,
+        )
+    except Exception as e:
+        raise PushToBuildCacheError(
+            f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
+        ) from e
 
     # push the key to the build cache's _pgp directory so it can be
     # imported
@@ -1296,8 +1201,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     if options.regenerate_index:
         generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))
 
-    return None
-
 
 class NotInstalledError(spack.error.SpackError):
     """Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1255,6 @@ def specs_to_be_packaged(
     return [s for s in itertools.chain(roots, deps) if not s.external]
 
 
-def push(spec: Spec, mirror_url: str, options: PushOptions):
-    """Create and push binary package for a single spec to the specified
-    mirror url.
-
-    Args:
-        spec: Spec to package and push
-        mirror_url: Desired destination url for binary package
-        options:
-
-    Returns:
-        True if package was pushed, False otherwise.
-
-    """
-    try:
-        push_or_raise(spec, mirror_url, options)
-    except NoOverwriteException as e:
-        warnings.warn(str(e))
-        return False
-
-    return True
-
-
 def try_verify(specfile_path):
     """Utility function to attempt to verify a local file. Assumes the
     file is a clearsigned signature file.
@@ -2706,3 +2587,96 @@ def conditional_fetch(self) -> FetchIndexResult:
             raise FetchIndexError(f"Remote index {url_manifest} is invalid")
 
         return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
+
+
+class NoOverwriteException(spack.error.SpackError):
+    """Raised when a file would be overwritten"""
+
+    def __init__(self, file_path):
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")
+
+
+class NoGpgException(spack.error.SpackError):
+    """
+    Raised when gpg2 is not in PATH
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class NoKeyException(spack.error.SpackError):
+    """
+    Raised when gpg has no default key added.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class PickKeyException(spack.error.SpackError):
+    """
+    Raised when multiple keys can be used to sign.
+    """
+
+    def __init__(self, keys):
+        err_msg = "Multiple keys available for signing\n%s\n" % keys
+        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
+        super().__init__(err_msg)
+
+
+class NoVerifyException(spack.error.SpackError):
+    """
+    Raised if file fails signature verification.
+    """
+
+    pass
+
+
+class NoChecksumException(spack.error.SpackError):
+    """
+    Raised if file fails checksum verification.
+    """
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
+
+
+class NewLayoutException(spack.error.SpackError):
+    """
+    Raised if directory layout is different from buildcache.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+    pass
+
+
+class UnsignedPackageException(spack.error.SpackError):
+    """
+    Raised if installation of unsigned package is attempted without
+    the use of ``--no-check-signature``.
+    """
+
+
+class ListMirrorSpecsError(spack.error.SpackError):
+    """Raised when unable to retrieve list of specs from the mirror"""
+
+
+class GenerateIndexError(spack.error.SpackError):
+    """Raised when unable to generate key or package index for mirror"""
+
+
+class CannotListKeys(GenerateIndexError):
+    """Raised when unable to list keys when generating key index"""
+
+
+class PushToBuildCacheError(spack.error.SpackError):
+    """Raised when unable to push objects to binary mirror"""
@@ -213,9 +213,6 @@ def _root_spec(spec_str: str) -> str:
     platform = str(spack.platforms.host())
     if platform == "darwin":
         spec_str += " %apple-clang"
-    elif platform == "windows":
-        # TODO (johnwparent): Remove version constraint when clingo patch is up
-        spec_str += " %msvc@:19.37"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
@@ -147,7 +147,7 @@ def _add_compilers_if_missing() -> None:
         mixed_toolchain=sys.platform == "darwin"
     )
     if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
+        spack.compilers.add_compilers_to_config(new_compilers)
 
 
 @contextlib.contextmanager
@@ -559,12 +559,49 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
     )
 
 
+def ensure_winsdk_external_or_raise() -> None:
+    """Ensure the Windows SDK + WGL are available on system
+    If both of these packages are found, the Spack user or bootstrap
+    configuration (depending on where Spack is running)
+    will be updated to include all versions and variants detected.
+    If either the WDK or WSDK are not found, this method will raise
+    a RuntimeError.
+
+    **NOTE:** This modifies the Spack config in the current scope,
+    either user or environment depending on the calling context.
+    This is different from all other current bootstrap dependency
+    checks.
+    """
+    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
+        return
+    externals = spack.detection.by_path(["win-sdk", "wgl"])
+    if not set(["win-sdk", "wgl"]) == externals.keys():
+        missing_packages_lst = []
+        if "wgl" not in externals:
+            missing_packages_lst.append("wgl")
+        if "win-sdk" not in externals:
+            missing_packages_lst.append("win-sdk")
+        missing_packages = " & ".join(missing_packages_lst)
+        raise RuntimeError(
+            f"Unable to find the {missing_packages}, please install these packages \
+via the Visual Studio installer \
+before proceeding with Spack or provide the path to a non standard install via \
+'spack external find --path'"
+        )
+    # wgl/sdk are not required for bootstrapping Spack, but
+    # are required for building anything non trivial
+    # add to user config so they can be used by subsequent Spack ops
+    spack.detection.update_configuration(externals, buildable=False)
+
+
 def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
         ensure_patchelf_in_path_or_raise()
-    ensure_gpg_in_path_or_raise()
+    if not IS_WINDOWS:
+        ensure_gpg_in_path_or_raise()
+    else:
+        ensure_winsdk_external_or_raise()
     ensure_clingo_importable_or_raise()
@@ -57,8 +57,10 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
+import spack.compilers
 import spack.config
 import spack.deptypes as dt
+import spack.error
 import spack.main
 import spack.package_base
 import spack.paths
@@ -66,6 +68,7 @@
 import spack.repo
 import spack.schema.environment
 import spack.spec
+import spack.stage
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
@@ -78,7 +81,7 @@
 from spack.installer import InstallError
 from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
-    SYSTEM_DIRS,
+    SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
@@ -101,9 +104,13 @@
 # Spack's compiler wrappers.
 #
 SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
 SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
+SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
+SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
 SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
 SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
 SPACK_PREFIX = "SPACK_PREFIX"
@@ -416,7 +423,7 @@ def set_compiler_environment_variables(pkg, env):
 
     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
 
-    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
+    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
 
     compiler.setup_custom_environment(pkg, env)
 
@@ -544,9 +551,23 @@ def update_compiler_args_for_dep(dep):
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
 
-    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
-    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
-    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
+    spack_managed_dirs: List[str] = [
+        spack.stage.get_stage_root(),
+        spack.store.STORE.db.root,
+        *(db.root for db in spack.store.STORE.db.upstream_dbs),
+    ]
+
+    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in spack_managed_dirs))
+    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
+    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
+    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
+    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
+    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
+    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
+    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
+    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
+    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
+    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
 
 
 def set_package_py_globals(pkg, context: Context = Context.BUILD):
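The split above relies on an order-preserving partition; a sketch of the behavior assumed from llnl.util.lang.stable_partition (an illustrative equivalent, not Spack's implementation):

    def stable_partition_sketch(items, predicate):
        true_items, false_items = [], []
        for item in items:  # single pass; relative order kept in both outputs
            (true_items if predicate(item) else false_items).append(item)
        return true_items, false_items

    spack_dirs, system_dirs = stable_partition_sketch(
        ["/opt/spack/store/zlib/lib", "/usr/lib"],
        lambda p: p.startswith("/opt/spack/store"),
    )
    # spack_dirs == ["/opt/spack/store/zlib/lib"]; system_dirs == ["/usr/lib"]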
@@ -583,10 +604,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     # Put spack compiler paths in module scope. (Some packages use it
     # in setup_run_environment etc, so don't put it context == build)
     link_dir = spack.paths.build_env_path
-    module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
-    module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
-    module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
-    module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
+    pkg_compiler = None
+    try:
+        pkg_compiler = pkg.compiler
+    except spack.compilers.NoCompilerForSpecError as e:
+        tty.debug(f"cannot set 'spack_cc': {str(e)}")
+
+    if pkg_compiler is not None:
+        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
+        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
+        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
+        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
+    else:
+        module.spack_cc = None
+        module.spack_cxx = None
+        module.spack_f77 = None
+        module.spack_fc = None
 
     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
@@ -789,7 +822,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in ["cray-mpich", "cray-libsci"]:
             module("unload", mod)
 
-    if target.module_name:
+    if target and target.module_name:
         load_module(target.module_name)
 
     load_external_modules(pkg)
@@ -434,11 +434,6 @@ def _do_patch_libtool(self):
                 r"crtendS\.o",
             ]:
                 x.filter(regex=(rehead + o), repl="")
-        elif self.pkg.compiler.name == "dpcpp":
-            # Hack to filter out spurious predep_objects when building with Intel dpcpp
-            # (see https://github.com/spack/spack/issues/32863):
-            x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
-            x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
         elif self.pkg.compiler.name == "nag":
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
@@ -541,7 +536,7 @@ def autoreconf(self, pkg, spec, prefix):
         if os.path.exists(self.configure_abs_path):
             return
 
-        # Else try to regenerate it, which reuquires a few build dependencies
+        # Else try to regenerate it, which requires a few build dependencies
         ensure_build_dependencies_or_raise(
             spec=spec,
             dependencies=["autoconf", "automake", "libtool"],
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
 import os
+import re
 from typing import Tuple
 
 import llnl.util.filesystem as fs
@@ -15,6 +16,12 @@
 from .cmake import CMakeBuilder, CMakePackage
 
 
+def spec_uses_toolchain(spec):
+    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
+    using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
+    return using_toolchain
+
+
 def cmake_cache_path(name, value, comment="", force=False):
     """Generate a string for a cmake cache variable"""
     force_str = " FORCE" if force else ""
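The new helper returns the (possibly empty) list of matching cxxflags, so it doubles as a truthiness test and as a value source; a sketch with a stand-in spec (the class and flag value are invented for illustration):

    class _FakeSpec:  # hypothetical stand-in for a concrete Spec
        compiler_flags = {"cxxflags": ["--gcc-toolchain=/opt/gcc-12"]}

    flags = spec_uses_toolchain(_FakeSpec())
    if flags:
        print("-Xcompiler {}".format(flags[0]))  # how the CUDA_FLAGS hunk below consumes it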
@@ -213,7 +220,7 @@ def initconfig_mpi_entries(self):
         else:
             # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
             # vs the older versions which expect MPIEXEC
-            if self.pkg.spec["cmake"].satisfies("@3.10:"):
+            if spec["cmake"].satisfies("@3.10:"):
                 entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
             else:
                 entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -248,12 +255,17 @@ def initconfig_hardware_entries(self):
             # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
 
-            archs = spec.variants["cuda_arch"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
+            # CUDA_FLAGS
+            cuda_flags = []
+
+            if not spec.satisfies("cuda_arch=none"):
+                cuda_archs = ";".join(spec.variants["cuda_arch"].value)
+                entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
+
+            if spec_uses_toolchain(spec):
+                cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
+
+            entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
 
         if "+rocm" in spec:
             entries.append("#------------------{0}".format("-" * 30))
@@ -262,9 +274,6 @@ def initconfig_hardware_entries(self):
 
             # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
             entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
             llvm_bin = spec["llvm-amdgpu"].prefix.bin
             llvm_prefix = spec["llvm-amdgpu"].prefix
             # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -277,11 +286,9 @@ def initconfig_hardware_entries(self):
             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
 
         return entries
 
@@ -16,7 +16,7 @@
 
 
 class CargoPackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using cargo."""
 
     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -21,7 +21,7 @@
 
 
 class MakefilePackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using Makefiles."""
 
     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree
 
 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, variant
+from spack.directives import conflicts, license, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -26,6 +26,7 @@ class IntelOneApiPackage(Package):
     """Base class for Intel oneAPI packages."""
 
     homepage = "https://software.intel.com/oneapi"
+    license("https://intel.ly/393CijO")
 
     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
@@ -4,12 +4,15 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
+import os
+from typing import Iterable
 
-from llnl.util.filesystem import filter_file
+from llnl.util.filesystem import filter_file, find
+from llnl.util.lang import memoized
 
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, extends
+from spack.install_test import SkipTest, test_part
 from spack.util.executable import Executable
 
 from ._checks import BaseBuilder, execute_build_time_tests
@@ -28,6 +31,58 @@ class PerlPackage(spack.package_base.PackageBase):
 
     extends("perl", when="build_system=perl")
 
+    @property
+    @memoized
+    def _platform_dir(self):
+        """Name of platform-specific module subdirectory."""
+        perl = self.spec["perl"].command
+        options = "-E", "use Config; say $Config{archname}"
+        out = perl(*options, output=str.split, error=str.split)
+        return out.strip()
+
+    @property
+    def use_modules(self) -> Iterable[str]:
+        """Names of the package's perl modules."""
+        module_files = find(self.prefix.lib, ["*.pm"], recursive=True)
+
+        # Drop the platform directory, if present
+        if self._platform_dir:
+            platform_dir = self._platform_dir + os.sep
+            module_files = [m.replace(platform_dir, "") for m in module_files]
+
+        # Drop the extension and library path
+        prefix = self.prefix.lib + os.sep
+        modules = [os.path.splitext(m)[0].replace(prefix, "") for m in module_files]
+
+        # Drop the perl subdirectory as well
+        return ["::".join(m.split(os.sep)[1:]) for m in modules]
+
+    @property
+    def skip_modules(self) -> Iterable[str]:
+        """Names of modules that should be skipped when running tests.
+
+        These are a subset of use_modules.
+
+        Returns:
+            List of strings of module names.
+        """
+        return []
+
+    def test_use(self):
+        """Test 'use module'"""
+        if not self.use_modules:
+            raise SkipTest("Test requires use_modules package property.")
+
+        perl = self.spec["perl"].command
+        for module in self.use_modules:
+            if module in self.skip_modules:
+                continue
+
+            with test_part(self, f"test_use-{module}", purpose=f"checking use of {module}"):
+                options = ["-we", f'use strict; use {module}; print("OK\n")']
+                out = perl(*options, output=str.split, error=str.split)
+                assert "OK" in out
+
 
 @spack.builder.builder("perl")
 class PerlBuilder(BaseBuilder):
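A sketch of how a package might use the new skip_modules hook (the package and module names are hypothetical):

    class PerlFooBar(PerlPackage):  # hypothetical package
        @property
        def skip_modules(self):
            # Excluded from test_use: importing this module needs a running display.
            return ["Foo::Bar::GUI"]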
@@ -52,7 +107,7 @@ class PerlBuilder(BaseBuilder):
     phases = ("configure", "build", "install")
 
     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_args", "check")
+    legacy_methods = ("configure_args", "check", "test_use")
 
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
@@ -27,7 +27,7 @@
 import spack.package_base
 import spack.spec
 import spack.store
-from spack.directives import build_system, depends_on, extends, maintainers
+from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
 from spack.spec import Spec
@@ -56,8 +56,6 @@ def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
 
 
 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart")
-
     @property
     def import_modules(self) -> Iterable[str]:
         """Names of modules that the Python package provides.
@@ -75,6 +75,8 @@
 # does not like its directory structure.
 #
 
+import os
+
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
@@ -154,6 +156,32 @@ def hip_flags(amdgpu_target):
         archs = ",".join(amdgpu_target)
         return "--amdgpu-target={0}".format(archs)
 
+    # ASAN
+    @staticmethod
+    def asan_on(env, llvm_path):
+        env.set("CC", llvm_path + "/bin/clang")
+        env.set("CXX", llvm_path + "/bin/clang++")
+        env.set("ASAN_OPTIONS", "detect_leaks=0")
+
+        for root, dirs, files in os.walk(llvm_path):
+            if "libclang_rt.asan-x86_64.so" in files:
+                asan_lib_path = root
+        env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
+        SET_DWARF_VERSION_4 = ""
+        try:
+            # This will throw an error if imported on a non-Linux platform.
+            import distro
+
+            distname = distro.id()
+        except ImportError:
+            distname = "unknown"
+        if "rhel" in distname or "sles" in distname:
+            SET_DWARF_VERSION_4 = "-gdwarf-5"
+
+        env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
+
     # HIP version vs Architecture
 
     # TODO: add a bunch of lines like:
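A sketch of how a ROCm package could opt into the new ASAN helper from its build-environment hook; the variant gate and dependency lookup below are illustrative assumptions, not part of this diff:

    def setup_build_environment(self, env):
        if self.spec.satisfies("+asan"):  # hypothetical variant gating the helper
            ROCmPackage.asan_on(env, self.spec["llvm-amdgpu"].prefix)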
@@ -9,6 +9,8 @@
 import inspect
 from typing import List, Optional, Tuple

+from llnl.util import lang
+
 import spack.build_environment

 #: Builder classes, as registered by the "builder" decorator
@@ -231,24 +233,27 @@ def __new__(mcs, name, bases, attr_dict):
         for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
             staged_callbacks = temporary_stage.callbacks

-            # We don't have callbacks in this class, move on
-            if not staged_callbacks:
+            # Here we have an adapter from an old-style package. This means there is no
+            # hierarchy of builders, and every callback that had to be combined between
+            # *Package and *Builder has been combined already by _PackageAdapterMeta
+            if name == "Adapter":
                 continue

-            # If we are here we have callbacks. To get a complete list, get first what
-            # was attached to parent classes, then prepend what we have registered here.
+            # If we are here we have callbacks. To get a complete list, we accumulate all the
+            # callbacks from base classes, we deduplicate them, then prepend what we have
+            # registered here.
+            #
             # The order should be:
             # 1. Callbacks are registered in order within the same class
             # 2. Callbacks defined in derived classes precede those defined in base
             #    classes
+            callbacks_from_base = []
             for base in bases:
-                callbacks_from_base = getattr(base, temporary_stage.attribute_name, None)
-                if callbacks_from_base:
-                    break
-            else:
-                callbacks_from_base = []
-
+                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
+                if not current_callbacks:
+                    continue
+                callbacks_from_base.extend(current_callbacks)
+            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
             # Set the callbacks in this class and flush the temporary stage
             attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
             del temporary_stage.callbacks[:]
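The accumulate-then-dedupe logic above relies on `llnl.util.lang.dedupe` keeping the first occurrence of each item, which preserves the documented ordering rules. A small sketch of the resulting callback order (the callback names are made up for illustration):

```python
from llnl.util import lang

# First occurrence wins, so a callback inherited from the first base class
# keeps its position even if a later base registers the same callback.
inherited = ["base_cb", "mixin_cb", "base_cb"]
merged = ["derived_cb"] + list(lang.dedupe(inherited))
print(merged)  # ['derived_cb', 'base_cb', 'mixin_cb']
```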
File diff suppressed because it is too large
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
         variants (bool): Show variants with specs
         indent (int): indent each line this much
         groups (bool): display specs grouped by arch/compiler (default True)
-        decorators (dict): dictionary mapping specs to decorators
-        header_callback (typing.Callable): called at start of arch/compiler groups
+        decorator (typing.Callable): function to call to decorate specs
         all_headers (bool): show headers even when arch/compiler aren't defined
         output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + ffmt + vfmt

-    transform = {"package": decorator, "fullpackage": decorator}
-
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
         if hashes:
             string += gray_hash(s, hlen) + " "
         string += depth * " "
-        string += s.cformat(format_string, transform=transform)
+        string += decorator(s, s.cformat(format_string))
         return string

     def format_list(specs):
@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
     return [x for x in specs if x.dag_hash() in hashes]


-def print_how_many_pkgs(specs, pkg_type=""):
+def print_how_many_pkgs(specs, pkg_type="", suffix=""):
     """Given a list of specs, this will print a message about how many
     specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
         category, e.g. if pkg_type is "installed" then the message
         would be "3 installed packages"
     """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


 def spack_is_git_repo():
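After this change, `display_specs` takes a single `decorator(spec, fmt)` callable instead of a `transform` dict. A sketch of a custom decorator under that contract (the `[+]` marker and the `specs` variable are illustrative):

```python
import spack.cmd


def mark_installed(spec, fmt):
    # Called once per spec with its already-formatted string
    return f"[+] {fmt}" if spec.installed else fmt


# `specs` would be any list of concrete specs, e.g. from a database query
spack.cmd.display_specs(specs, decorator=mark_installed, all_headers=True)
```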
@@ -275,23 +275,37 @@ def setup_parser(subparser: argparse.ArgumentParser):

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
-    sync.add_argument(
-        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
+    sync_manifest_source = sync.add_argument_group(
+        "Manifest Source",
+        "Specify a list of build cache objects to sync using manifest file(s)."
+        'This option takes the place of the "source mirror" for synchronization'
+        'and optionally takes a "destination mirror" ',
+    )
+    sync_manifest_source.add_argument(
+        "--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
     )
-    sync.add_argument(
+    sync_source_mirror = sync.add_argument_group(
+        "Named Source",
+        "Specify a single registered source mirror to synchronize from. This option requires"
+        "the specification of a destination mirror.",
+    )
+    sync_source_mirror.add_argument(
         "src_mirror",
         metavar="source mirror",
-        type=arguments.mirror_name_or_url,
+        nargs="?",
+        type=arguments.mirror_name_or_url,
         help="source mirror name, path, or URL",
     )

     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
-        type=arguments.mirror_name_or_url,
+        nargs="?",
+        type=arguments.mirror_name_or_url,
         help="destination mirror name, path, or URL",
     )

     sync.set_defaults(func=sync_fn)

     # Update buildcache index without copying any additional packages
@@ -1070,7 +1084,17 @@ def sync_fn(args):
     requires an active environment in order to know which specs to sync
     """
     if args.manifest_glob:
-        manifest_copy(glob.glob(args.manifest_glob))
+        # Passing the args.src_mirror here because it is not possible to
+        # have the destination be required when specifying a named source
+        # mirror and optional for the --manifest-glob argument. In the case
+        # of manifest glob sync, the source mirror positional argument is the
+        # destination mirror if it is specified. If there are two mirrors
+        # specified, the second is ignored and the first is the override
+        # destination.
+        if args.dest_mirror:
+            tty.warn(f"Ignoring unused argument: {args.dest_mirror.name}")
+
+        manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
         return 0

     if args.src_mirror is None or args.dest_mirror is None:
@@ -1121,7 +1145,7 @@ def sync_fn(args):
     shutil.rmtree(tmpdir)


-def manifest_copy(manifest_file_list):
+def manifest_copy(manifest_file_list, dest_mirror=None):
     """Read manifest files containing information about specific specs to copy
     from source to destination, remove duplicates since any binary package for
     a given hash should be the same as any other, and copy all files specified
@@ -1135,10 +1159,17 @@ def manifest_copy(manifest_file_list):
             # Last duplicate hash wins
             deduped_manifest[spec_hash] = copy_list

+    build_cache_dir = bindist.build_cache_relative_path()
     for spec_hash, copy_list in deduped_manifest.items():
         for copy_file in copy_list:
-            tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
-            copy_buildcache_file(copy_file["src"], copy_file["dest"])
+            dest = copy_file["dest"]
+            if dest_mirror:
+                src_relative_path = os.path.join(
+                    build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
+                )
+                dest = url_util.join(dest_mirror.push_url, src_relative_path)
+            tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
+            copy_buildcache_file(copy_file["src"], dest)


 def update_index(mirror: spack.mirror.Mirror, update_keys=False):
@@ -1165,14 +1196,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
             url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
         )

-        bindist.generate_key_index(keys_url)
+        try:
+            bindist.generate_key_index(keys_url)
+        except bindist.CannotListKeys as e:
+            # Do not error out if listing keys went wrong. This usually means that the _gpg path
+            # does not exist. TODO: distinguish between this and other errors.
+            tty.warn(f"did not update the key index: {e}")


 def update_index_fn(args):
     """update a buildcache index"""
-    update_index(args.mirror, update_keys=args.keys)
+    return update_index(args.mirror, update_keys=args.keys)


 def buildcache(parser, args):
     if args.func:
-        args.func(args)
+        return args.func(args)
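With the reworked parser, the source mirror positional is optional and, for manifest-based syncs, doubles as a destination override. A hedged sketch using Spack's command wrapper (the glob pattern and mirror names are hypothetical):

```python
from spack.main import SpackCommand

buildcache = SpackCommand("buildcache")

# Manifest-driven sync: copy objects listed in CI rebuild manifests; the
# single positional mirror, if given, overrides each manifest's destination.
buildcache("sync", "--manifest-glob", "artifacts/*/manifest.json", "my-mirror")

# Classic mirror-to-mirror sync still requires both positionals.
buildcache("sync", "source-mirror", "dest-mirror")
```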
@@ -183,7 +183,7 @@ def checksum(parser, args):
         print()

     if args.add_to_package:
-        add_versions_to_package(pkg, version_lines)
+        add_versions_to_package(pkg, version_lines, args.batch)


 def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -229,7 +229,7 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
         tty.die("Invalid checksums found.")


-def add_versions_to_package(pkg: PackageBase, version_lines: str):
+def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
     """
     Add checksummed versions to a package's instructions and open a user's
     editor so they may double check the work of the function.
@@ -282,5 +282,5 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
     tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
     tty.msg(f"Open {filename} to review the additions.")

-    if sys.stdout.isatty():
+    if sys.stdout.isatty() and not is_batch:
         editor(filename)
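The `is_batch` plumbing above means `--batch` now suppresses the editor after `--add-to-package`, which suits non-interactive runs. A sketch (the package name is illustrative):

```python
from spack.main import SpackCommand

checksum = SpackCommand("checksum")

# New versions are written into the package recipe, but $EDITOR is not
# opened because --batch marks the run as non-interactive.
checksum("--batch", "--add-to-package", "zlib")
```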
@@ -14,6 +14,7 @@

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci
+import spack.cmd
 import spack.cmd.buildcache as buildcache
 import spack.config as cfg
 import spack.environment as ev
@@ -32,6 +33,7 @@
 SPACK_COMMAND = "spack"
 MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
+FAILED_CREATE_BUILDCACHE_CODE = 100


 def deindent(desc):
@@ -705,11 +707,9 @@ def ci_rebuild(args):
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
             cdash_handler.copy_test_results(reports_dir, job_test_dir)

-    # If the install succeeded, create a buildcache entry for this job spec
-    # and push it to one or more mirrors. If the install did not succeed,
-    # print out some instructions on how to reproduce this build failure
-    # outside of the pipeline environment.
     if install_exit_code == 0:
+        # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
+        # will result in a non-zero exit code. Pushing is best-effort.
         mirror_urls = [buildcache_mirror_url]

         # TODO: Remove this block in Spack 0.23
@@ -721,13 +721,12 @@ def ci_rebuild(args):
             destination_mirror_urls=mirror_urls,
             sign_binaries=spack_ci.can_sign_binaries(),
         ):
-            msg = tty.msg if result.success else tty.warn
-            msg(
-                "{} {} to {}".format(
-                    "Pushed" if result.success else "Failed to push",
-                    job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
-                    result.url,
-                )
-            )
             if not result.success:
                 install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
+            (tty.msg if result.success else tty.error)(
+                f'{"Pushed" if result.success else "Failed to push"} '
+                f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
+                f"to {result.url}"
+            )

     # If this is a develop pipeline, check if the spec that we just built is
@@ -748,22 +747,22 @@ def ci_rebuild(args):
                 tty.warn(msg.format(broken_spec_path, err))

     else:
+        # If the install did not succeed, print out some instructions on how to reproduce this
+        # build failure outside of the pipeline environment.
         tty.debug("spack install exited non-zero, will not create buildcache")

         api_root_url = os.environ.get("CI_API_V4_URL")
         ci_project_id = os.environ.get("CI_PROJECT_ID")
         ci_job_id = os.environ.get("CI_JOB_ID")

-        repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
-            api_root_url, ci_project_id, ci_job_id
-        )
-
+        repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
         # Control characters cause this to be printed in blue so it stands out
-        reproduce_msg = """
+        print(
+            f"""

 \033[34mTo reproduce this build locally, run:

-    spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
+    spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]

 If this project does not have public pipelines, you will need to first:

@@ -771,12 +770,9 @@ def ci_rebuild(args):

 ... then follow the printed instructions.\033[0;0m

-""".format(
-            repro_job_url
-        )
-
-        print(reproduce_msg)
+"""
+        )

     rebuild_timer.stop()
     try:
         with open("install_timers.json", "w") as timelog:
@@ -570,6 +570,14 @@ def add_concretizer_args(subparser):
         default=None,
         help="reuse installed dependencies only",
     )
+    subgroup.add_argument(
+        "--deprecated",
+        action=ConfigSetAction,
+        dest="config:deprecated",
+        const=True,
+        default=None,
+        help="allow concretizer to select deprecated versions",
+    )


 def add_connection_args(subparser, add_help):
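`--deprecated` becomes a shared concretizer argument backed by `ConfigSetAction`, so the per-command `if args.deprecated:` blocks are dropped in several of the command diffs that follow. A sketch of the flag in use (the spec is illustrative):

```python
from spack.main import SpackCommand

install = SpackCommand("install")

# The flag maps straight onto config:deprecated for this invocation only.
install("--deprecated", "openssl@1.0.2u")
```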
@@ -89,7 +89,7 @@ def compiler_find(args):
         paths, scope=None, mixed_toolchain=args.mixed_toolchain
     )
     if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
+        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
         n = len(new_compilers)
         s = "s" if n > 1 else ""
@@ -19,7 +19,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["jobs"])
+    arguments.add_common_arguments(subparser, ["jobs", "no_checksum", "spec"])
     subparser.add_argument(
         "-d",
         "--source-path",
@@ -34,7 +34,6 @@ def setup_parser(subparser):
         dest="ignore_deps",
         help="do not try to install dependencies of requested packages",
     )
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
     subparser.add_argument(
         "--keep-prefix",
         action="store_true",
@@ -63,7 +62,6 @@ def setup_parser(subparser):
         choices=["root", "all"],
         help="run tests on only root packages or all packages",
     )
-    arguments.add_common_arguments(subparser, ["spec"])

     stop_group = subparser.add_mutually_exclusive_group()
     stop_group.add_argument(
@@ -125,9 +123,6 @@ def dev_build(self, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     tests = False
     if args.test == "all":
         tests = True
@@ -9,6 +9,7 @@
 import shutil
 import sys
 import tempfile
+from pathlib import Path
 from typing import Optional

 import llnl.string as string
@@ -44,6 +45,7 @@
     "deactivate",
     "create",
     ["remove", "rm"],
+    ["rename", "mv"],
     ["list", "ls"],
     ["status", "st"],
     "loads",
@@ -472,11 +474,82 @@ def env_remove(args):
     tty.msg(f"Successfully removed environment '{bad_env_name}'")


+#
+# env rename
+#
+def env_rename_setup_parser(subparser):
+    """rename an existing environment"""
+    subparser.add_argument(
+        "mv_from", metavar="from", help="name (or path) of existing environment"
+    )
+    subparser.add_argument(
+        "mv_to", metavar="to", help="new name (or path) for existing environment"
+    )
+    subparser.add_argument(
+        "-d",
+        "--dir",
+        action="store_true",
+        help="the specified arguments correspond to directory paths",
+    )
+    subparser.add_argument(
+        "-f", "--force", action="store_true", help="allow overwriting of an existing environment"
+    )
+
+
+def env_rename(args):
+    """Rename an environment.
+
+    This renames a managed environment or moves an anonymous environment.
+    """
+
+    # Directory option has been specified
+    if args.dir:
+        if not ev.is_env_dir(args.mv_from):
+            tty.die("The specified path does not correspond to a valid spack environment")
+        from_path = Path(args.mv_from)
+        if not args.force:
+            if ev.is_env_dir(args.mv_to):
+                tty.die(
+                    "The new path corresponds to an existing environment;"
+                    " specify the --force flag to overwrite it."
+                )
+            if Path(args.mv_to).exists():
+                tty.die("The new path already exists; specify the --force flag to overwrite it.")
+        to_path = Path(args.mv_to)
+
+    # Name option being used
+    elif ev.exists(args.mv_from):
+        from_path = ev.environment.environment_dir_from_name(args.mv_from)
+        if not args.force and ev.exists(args.mv_to):
+            tty.die(
+                "The new name corresponds to an existing environment;"
+                " specify the --force flag to overwrite it."
+            )
+        to_path = ev.environment.root(args.mv_to)
+
+    # Neither
+    else:
+        tty.die("The specified name does not correspond to a managed spack environment")
+
+    # Guard against renaming from or to an active environment
+    active_env = ev.active_environment()
+    if active_env:
+        from_env = ev.Environment(from_path)
+        if from_env.path == active_env.path:
+            tty.die("Cannot rename active environment")
+        if to_path == active_env.path:
+            tty.die(f"{args.mv_to} is an active environment")
+
+    shutil.rmtree(to_path, ignore_errors=True)
+    fs.rename(from_path, to_path)
+    tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")
+
+
 #
 # env list
 #
 def env_list_setup_parser(subparser):
-    """list available environments"""
+    """list managed environments"""


 def env_list(args):
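A usage sketch for the new `spack env rename` subcommand added above (environment names and paths are illustrative):

```python
from spack.main import SpackCommand

env = SpackCommand("env")

env("create", "demo")
env("rename", "demo", "demo-archive")               # managed env, by name
env("rename", "--force", "other", "demo-archive")   # overwrite an existing env
env("rename", "-d", "./old-dir", "./new-dir")       # anonymous env, by path
```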
@@ -18,6 +18,7 @@
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
 import spack.repo
 import spack.util.environment
 from spack.cmd.common import arguments

@@ -152,9 +153,9 @@ def external_find(args):
 def packages_to_search_for(
     *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
 ):
-    result = []
-    for current_tag in tags:
-        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))
+    result = list(
+        {pkg for tag in tags for pkg in spack.repo.PATH.packages_with_tags(tag, full=True)}
+    )

     if names:
         # Match both fully qualified and unqualified
@@ -18,7 +18,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
+    arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
     subparser.add_argument(
         "-m",
         "--missing",
@@ -28,7 +28,7 @@ def setup_parser(subparser):
     subparser.add_argument(
         "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
     )
-    arguments.add_common_arguments(subparser, ["specs"])
+    arguments.add_concretizer_args(subparser)
     subparser.epilog = (
         "With an active environment, the specs "
         "parameter can be omitted. In this case all (uninstalled"
@@ -40,9 +40,6 @@ def fetch(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     if args.specs:
         specs = spack.cmd.parse_specs(args.specs, concretize=True)
     else:
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import copy
 import sys

 import llnl.util.lang
@@ -14,6 +13,7 @@
 import spack.cmd as cmd
 import spack.environment as ev
 import spack.repo
+import spack.store
 from spack.cmd.common import arguments
 from spack.database import InstallStatuses

@@ -69,6 +69,12 @@ def setup_parser(subparser):

     arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

+    subparser.add_argument(
+        "-r",
+        "--only-roots",
+        action="store_true",
+        help="don't show full list of installed specs in an environment",
+    )
     subparser.add_argument(
         "-c",
         "--show-concretized",
@@ -140,6 +146,12 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
+    subparser.add_argument(
+        "--install-tree",
+        action="store",
+        default="all",
+        help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
+    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -168,6 +180,12 @@ def query_arguments(args):

     q_args = {"installed": installed, "known": known, "explicit": explicit}

+    install_tree = args.install_tree
+    upstreams = spack.config.get("upstreams", {})
+    if install_tree in upstreams.keys():
+        install_tree = upstreams[install_tree]["install_tree"]
+    q_args["install_tree"] = install_tree
+
     # Time window of installation
     for attribute in ("start_date", "end_date"):
         date = getattr(args, attribute)
@@ -177,26 +195,22 @@ def query_arguments(args):
     return q_args


-def setup_env(env):
+def make_env_decorator(env):
     """Create a function for decorating specs when in an environment."""

-    def strip_build(seq):
-        return set(s.copy(deps=("link", "run")) for s in seq)
-
-    added = set(strip_build(env.added_specs()))
-    roots = set(strip_build(env.roots()))
-    removed = set(strip_build(env.removed_specs()))
+    roots = set(env.roots())
+    removed = set(env.removed_specs())

     def decorator(spec, fmt):
         # add +/-/* to show added/removed/root specs
         if any(spec.dag_hash() == r.dag_hash() for r in roots):
-            return color.colorize("@*{%s}" % fmt)
+            return color.colorize(f"@*{{{fmt}}}")
         elif spec in removed:
-            return color.colorize("@K{%s}" % fmt)
+            return color.colorize(f"@K{{{fmt}}}")
         else:
-            return "%s" % fmt
+            return fmt

-    return decorator, added, roots, removed
+    return decorator


 def display_env(env, args, decorator, results):
@@ -211,28 +225,51 @@ def display_env(env, args, decorator, results):
     """
     tty.msg("In environment %s" % env.name)

-    if not env.user_specs:
-        tty.msg("No root specs")
-    else:
-        tty.msg("Root specs")
+    num_roots = len(env.user_specs) or "No"
+    tty.msg(f"{num_roots} root specs")

-        # Root specs cannot be displayed with prefixes, since those are not
-        # set for abstract specs. Same for hashes
-        root_args = copy.copy(args)
-        root_args.paths = False
+    concrete_specs = {
+        root: concrete_root
+        for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
+    }

-        # Roots are displayed with variants, etc. so that we can see
-        # specifically what the user asked for.
+    def root_decorator(spec, string):
+        """Decorate root specs with their install status if needed"""
+        concrete = concrete_specs.get(spec)
+        if concrete:
+            status = color.colorize(concrete.install_status().value)
+            hash = concrete.dag_hash()
+        else:
+            status = color.colorize(spack.spec.InstallStatus.absent.value)
+            hash = "-" * 32
+
+        # TODO: status has two extra spaces on the end of it, but fixing this and other spec
+        # TODO: space format idiosyncrasies is complicated. Fix this eventually
+        status = status[:-2]
+
+        if args.long or args.very_long:
+            hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
+            return f"{status} {hash} {string}"
+        else:
+            return f"{status} {string}"
+
+    with spack.store.STORE.db.read_transaction():
         cmd.display_specs(
             env.user_specs,
-            root_args,
-            decorator=lambda s, f: color.colorize("@*{%s}" % f),
+            args,
+            # these are overrides of CLI args
+            paths=False,
+            long=False,
+            very_long=False,
+            # these enforce details in the root specs to show what the user asked for
+            namespaces=True,
+            show_flags=True,
+            show_full_compiler=True,
+            decorator=root_decorator,
+            variants=True,
         )
-        print()
+
+    print()

     if args.show_concretized:
         tty.msg("Concretized roots")
@@ -242,7 +279,7 @@ def display_env(env, args, decorator, results):
     # Display a header for the installed packages section IF there are installed
     # packages. If there aren't any, we'll just end up printing "0 installed packages"
     # later.
-    if results:
+    if results and not args.only_roots:
         tty.msg("Installed packages")


@@ -251,9 +288,10 @@ def find(parser, args):
     results = args.specs(**q_args)

     env = ev.active_environment()
-    decorator = lambda s, f: f
-    if env:
-        decorator, _, roots, _ = setup_env(env)
+    if not env and args.only_roots:
+        tty.die("-r / --only-roots requires an active environment")
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f

     # use groups by default except with format.
     if args.groups is None:
@@ -280,9 +318,12 @@ def find(parser, args):
     if env:
         display_env(env, args, decorator, results)

-    cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+    count_suffix = " (not shown)"
+    if not args.only_roots:
+        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(results, pkg_type)
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
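A sketch of the new `-r/--only-roots` behavior: it requires an active environment, and when the installed list is suppressed, the final package count gains a " (not shown)" suffix. The environment name is illustrative:

```python
import spack.environment as ev
from spack.main import SpackCommand

find = SpackCommand("find")

# --only-roots dies outside an active environment, so activate one first.
with ev.read("myenv"):
    output = find("--only-roots")
```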
@@ -176,7 +176,7 @@ def setup_parser(subparser):
         dest="install_source",
         help="install source files in prefix",
     )
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
+    arguments.add_common_arguments(subparser, ["no_checksum"])
     subparser.add_argument(
         "-v",
         "--verbose",
@@ -326,9 +326,6 @@ def install(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     if args.log_file and not args.log_format:
         msg = "the '--log-format' must be specified when using '--log-file'"
         tty.die(msg)
@@ -423,10 +420,9 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
         with reporter_factory(specs_to_install):
             env.install_specs(specs_to_install, **install_kwargs)
     finally:
-        # TODO: this is doing way too much to trigger
-        # views and modules to be generated.
-        with env.write_transaction():
-            env.write(regenerate=True)
+        if env.views:
+            with env.write_transaction():
+                env.write(regenerate=True)


 def concrete_specs_from_cli(args, install_kwargs):
@@ -5,8 +5,6 @@

 import sys

-import llnl.util.tty as tty
-
 import spack.cmd
 import spack.cmd.find
 import spack.environment as ev
@@ -70,16 +68,6 @@ def setup_parser(subparser):
         help="load the first match if multiple packages match the spec",
     )

-    subparser.add_argument(
-        "--only",
-        default="package,dependencies",
-        dest="things_to_load",
-        choices=["package", "dependencies"],
-        help="select whether to load the package and its dependencies\n\n"
-        "the default is to load the package and all dependencies. alternatively, "
-        "one can decide to load only the package or only the dependencies",
-    )
-
     subparser.add_argument(
         "--list",
         action="store_true",
@@ -110,11 +98,6 @@ def load(parser, args):
         )
         return 1

-    if args.things_to_load != "package,dependencies":
-        tty.warn(
-            "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
-        )
-
     with spack.store.STORE.db.read_transaction():
         env_mod = uenv.environment_modifications_for_specs(*specs)
         for spec in specs:
@@ -53,6 +53,7 @@ def setup_parser(subparser):
         "-S", "--stages", action="store_true", help="top level stage directory"
     )
     directories.add_argument(
+        "-c",
         "--source-dir",
         action="store_true",
         help="source directory for a spec (requires it to be staged first)",
@@ -28,7 +28,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
+    arguments.add_common_arguments(subparser, ["no_checksum"])

     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="mirror_command")

@@ -72,6 +72,7 @@ def setup_parser(subparser):
         " retrieve all versions of each package",
     )
     arguments.add_common_arguments(create_parser, ["specs"])
+    arguments.add_concretizer_args(create_parser)

     # Destroy
     destroy_parser = sp.add_parser("destroy", help=mirror_destroy.__doc__)
@@ -107,6 +108,11 @@ def setup_parser(subparser):
         "and source use `--type binary --type source` (default)"
         ),
     )
+    add_parser.add_argument(
+        "--autopush",
+        action="store_true",
+        help=("set mirror to push automatically after installation"),
+    )
     add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
     add_parser_signed.add_argument(
         "--unsigned",
@@ -174,6 +180,21 @@ def setup_parser(subparser):
         ),
     )
     set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
+    set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
+    set_parser_autopush.add_argument(
+        "--autopush",
+        help="set mirror to push automatically after installation",
+        action="store_true",
+        default=None,
+        dest="autopush",
+    )
+    set_parser_autopush.add_argument(
+        "--no-autopush",
+        help="set mirror to not push automatically after installation",
+        action="store_false",
+        default=None,
+        dest="autopush",
+    )
     set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
     set_parser_unsigned.add_argument(
         "--unsigned",
@@ -217,6 +238,7 @@ def mirror_add(args):
         or args.type
         or args.oci_username
         or args.oci_password
+        or args.autopush
         or args.signed is not None
     ):
         connection = {"url": args.url}
@@ -233,6 +255,8 @@ def mirror_add(args):
         if args.type:
             connection["binary"] = "binary" in args.type
             connection["source"] = "source" in args.type
+        if args.autopush:
+            connection["autopush"] = args.autopush
         if args.signed is not None:
             connection["signed"] = args.signed
         mirror = spack.mirror.Mirror(connection, name=args.name)
@@ -269,6 +293,8 @@ def _configure_mirror(args):
         changes["access_pair"] = [args.oci_username, args.oci_password]
     if getattr(args, "signed", None) is not None:
         changes["signed"] = args.signed
+    if getattr(args, "autopush", None) is not None:
+        changes["autopush"] = args.autopush

     # argparse cannot distinguish between --binary and --no-binary when same dest :(
     # notice that set-url does not have these args, so getattr
@@ -549,7 +575,4 @@ def mirror(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     action[args.mirror_command](args)
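The new `--autopush`/`--no-autopush` pair in sketch form (mirror name and URL are placeholders):

```python
from spack.main import SpackCommand

mirror = SpackCommand("mirror")

# Binaries get pushed to this mirror automatically after each install.
mirror("add", "--autopush", "ci-cache", "oci://registry.example.com/cache")

# The flags are mutually exclusive and can be toggled later with `set`.
mirror("set", "--no-autopush", "ci-cache")
```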
@@ -19,7 +19,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
+    arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
     arguments.add_concretizer_args(subparser)


@@ -33,9 +33,6 @@ def patch(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     specs = spack.cmd.parse_specs(args.specs, concretize=False)
     for spec in specs:
         _patch(spack.cmd.matching_spec_from_env(spec).package)
@@ -116,39 +116,38 @@ def ipython_interpreter(args):

 def python_interpreter(args):
     """A python interpreter is the default interpreter"""
-    # Fake a main python shell by setting __name__ to __main__.
-    console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
-    if "PYTHONSTARTUP" in os.environ:
-        startup_file = os.environ["PYTHONSTARTUP"]
-        if os.path.isfile(startup_file):
-            with open(startup_file) as startup:
-                console.runsource(startup.read(), startup_file, "exec")
-
-    if args.python_command:
-        propagate_exceptions_from(console)
-        console.runsource(args.python_command)
-    elif args.python_args:
-        propagate_exceptions_from(console)
+    if args.python_args and not args.python_command:
         sys.argv = args.python_args
-        with open(args.python_args[0]) as file:
-            console.runsource(file.read(), args.python_args[0], "exec")
+        runpy.run_path(args.python_args[0], run_name="__main__")
     else:
-        # Provides readline support, allowing user to use arrow keys
-        console.push("import readline")
-        # Provide tab completion
-        console.push("from rlcompleter import Completer")
-        console.push("readline.set_completer(Completer(locals()).complete)")
-        console.push('readline.parse_and_bind("tab: complete")')
+        # Fake a main python shell by setting __name__ to __main__.
+        console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
+        if "PYTHONSTARTUP" in os.environ:
+            startup_file = os.environ["PYTHONSTARTUP"]
+            if os.path.isfile(startup_file):
+                with open(startup_file) as startup:
+                    console.runsource(startup.read(), startup_file, "exec")
+        if args.python_command:
+            propagate_exceptions_from(console)
+            console.runsource(args.python_command)
+        else:
+            # Provides readline support, allowing user to use arrow keys
+            console.push("import readline")
+            # Provide tab completion
+            console.push("from rlcompleter import Completer")
+            console.push("readline.set_completer(Completer(locals()).complete)")
+            console.push('readline.parse_and_bind("tab: complete")')

-    console.interact(
-        "Spack version %s\nPython %s, %s %s"
-        % (
-            spack.spack_version,
-            platform.python_version(),
-            platform.system(),
-            platform.machine(),
+            console.interact(
+                "Spack version %s\nPython %s, %s %s"
+                % (
+                    spack.spack_version,
+                    platform.python_version(),
+                    platform.system(),
+                    platform.machine(),
+                )
+            )
-        )
-    )


 def propagate_exceptions_from(console):
@@ -91,7 +91,6 @@ def setup_parser(subparser):


 def _process_result(result, show, required_format, kwargs):
-    result.raise_if_unsat()
     opt, _, _ = min(result.answers)
     if ("opt" in show) and (not required_format):
         tty.msg("Best of %d considered solutions." % result.nmodels)
@@ -22,7 +22,7 @@


 def setup_parser(subparser):
-    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
+    arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
     subparser.add_argument(
         "-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
     )
@@ -33,9 +33,6 @@ def stage(parser, args):
     if args.no_checksum:
         spack.config.set("config:checksum", False, scope="command_line")

-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     if not args.specs:
         env = ev.active_environment()
         if not env:
@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

 def test_list(args):
     """list installed packages with available tests"""
-    tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()
+    tagged = spack.repo.PATH.packages_with_tags(*args.tag) if args.tag else set()

     def has_test_and_tags(pkg_class):
         tests = spack.install_test.test_functions(pkg_class)
@@ -34,6 +34,13 @@ def setup_parser(subparser):
         default=False,
         help="show full pytest help, with advanced options",
     )
+    subparser.add_argument(
+        "-n",
+        "--numprocesses",
+        type=int,
+        default=1,
+        help="run tests in parallel up to this wide, default 1 for sequential",
+    )

     # extra spack arguments to list tests
     list_group = subparser.add_argument_group("listing tests")
@@ -229,6 +236,16 @@ def unit_test(parser, args, unknown_args):
     if args.extension:
         pytest_root = spack.extensions.load_extension(args.extension)

+    if args.numprocesses is not None and args.numprocesses > 1:
+        pytest_args.extend(
+            [
+                "--dist",
+                "loadfile",
+                "--tx",
+                f"{args.numprocesses}*popen//python=spack-tmpconfig spack python",
+            ]
+        )
+
     # pytest.ini lives in the root of the spack repository.
     with llnl.util.filesystem.working_dir(pytest_root):
         if args.list:
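A sketch of the new parallel option: it forwards pytest-xdist distribution flags (`--dist loadfile --tx …`), so pytest-xdist must be importable in Spack's Python environment (an assumption of this sketch):

```python
from spack.main import SpackCommand

unit_test = SpackCommand("unit-test")

# Distributes test files over 4 worker processes via --dist loadfile;
# the test path argument is illustrative.
unit_test("-n", "4", "cmd/find.py")
```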
@@ -334,6 +334,40 @@ def __init__(
         # used for version checks for API, e.g. C++11 flag
         self._real_version = None

+    def __eq__(self, other):
+        return (
+            self.cc == other.cc
+            and self.cxx == other.cxx
+            and self.fc == other.fc
+            and self.f77 == other.f77
+            and self.spec == other.spec
+            and self.operating_system == other.operating_system
+            and self.target == other.target
+            and self.flags == other.flags
+            and self.modules == other.modules
+            and self.environment == other.environment
+            and self.extra_rpaths == other.extra_rpaths
+            and self.enable_implicit_rpaths == other.enable_implicit_rpaths
+        )
+
+    def __hash__(self):
+        return hash(
+            (
+                self.cc,
+                self.cxx,
+                self.fc,
+                self.f77,
+                self.spec,
+                self.operating_system,
+                self.target,
+                str(self.flags),
+                str(self.modules),
+                str(self.environment),
+                str(self.extra_rpaths),
+                self.enable_implicit_rpaths,
+            )
+        )
+
     def verify_executables(self):
         """Raise an error if any of the compiler executables is not valid.

@@ -389,8 +423,7 @@ def implicit_rpaths(self):

         # Put CXX first since it has the most linking issues
         # And because it has flags that affect linking
-        exe_paths = [x for x in [self.cxx, self.cc, self.fc, self.f77] if x]
-        link_dirs = self._get_compiler_link_paths(exe_paths)
+        link_dirs = self._get_compiler_link_paths()

         all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
         return list(paths_containing_libs(link_dirs, all_required_libs))
@@ -403,43 +436,33 @@ def required_libs(self):
         # By default every compiler returns the empty list
         return []

-    def _get_compiler_link_paths(self, paths):
-        first_compiler = next((c for c in paths if c), None)
-        if not first_compiler:
-            return []
-        if not self.verbose_flag:
-            # In this case there is no mechanism to learn what link directories
-            # are used by the compiler
+    def _get_compiler_link_paths(self):
+        cc = self.cc if self.cc else self.cxx
+        if not cc or not self.verbose_flag:
+            # Cannot determine implicit link paths without a compiler / verbose flag
             return []

-        # What flag types apply to first_compiler, in what order
-        flags = ["cppflags", "ldflags"]
-        if first_compiler == self.cc:
-            flags = ["cflags"] + flags
-        elif first_compiler == self.cxx:
-            flags = ["cxxflags"] + flags
+        if cc == self.cc:
+            flags = ["cflags", "cppflags", "ldflags"]
         else:
-            flags.append("fflags")
+            flags = ["cxxflags", "cppflags", "ldflags"]

         try:
             tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
             fout = os.path.join(tmpdir, "output")
             fin = os.path.join(tmpdir, "main.c")

-            with open(fin, "w+") as csource:
+            with open(fin, "w") as csource:
                 csource.write(
-                    "int main(int argc, char* argv[]) { " "(void)argc; (void)argv; return 0; }\n"
+                    "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
                 )
-            compiler_exe = spack.util.executable.Executable(first_compiler)
+            cc_exe = spack.util.executable.Executable(cc)
             for flag_type in flags:
-                for flag in self.flags.get(flag_type, []):
-                    compiler_exe.add_default_arg(flag)
+                cc_exe.add_default_arg(*self.flags.get(flag_type, []))

-            output = ""
             with self.compiler_environment():
-                output = str(
-                    compiler_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
-                )  # str for py2
+                output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
             return _parse_non_system_link_dirs(output)
         except spack.util.executable.ProcessError as pe:
             tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
@@ -10,6 +10,7 @@
 import itertools
 import multiprocessing.pool
 import os
+import warnings
 from typing import Dict, List, Optional, Tuple

 import archspec.cpu
@@ -109,29 +110,128 @@ def _to_dict(compiler):
     return {"compiler": d}


-def get_compiler_config(scope=None, init_config=True):
+def get_compiler_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = False,
+) -> List[Dict]:
     """Return the compiler configuration for the specified architecture."""
-    config = spack.config.get("compilers", scope=scope) or []
+    config = configuration.get("compilers", scope=scope) or []
     if config or not init_config:
         return config

-    merged_config = spack.config.get("compilers")
+    merged_config = configuration.get("compilers")
     if merged_config:
-        # Config is empty for this scope
+        # Do not init config because there is a non-empty scope
         return config

-    _init_compiler_config(scope=scope)
-    config = spack.config.get("compilers", scope=scope)
+    _init_compiler_config(configuration, scope=scope)
+    config = configuration.get("compilers", scope=scope)
     return config


-def _init_compiler_config(*, scope):
+def get_compiler_config_from_packages(
+    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
+) -> List[Dict]:
+    """Return the compiler configuration from packages.yaml"""
+    config = configuration.get("packages", scope=scope)
+    if not config:
+        return []
+
+    packages = []
+    compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
+    for name, entry in config.items():
+        if name not in compiler_package_names:
+            continue
+        externals_config = entry.get("externals", None)
+        if not externals_config:
+            continue
+        packages.extend(_compiler_config_from_package_config(externals_config))
+
+    return packages
+
+
+def _compiler_config_from_package_config(config):
+    compilers = []
+    for entry in config:
+        compiler = _compiler_config_from_external(entry)
+        if compiler:
+            compilers.append(compiler)
+
+    return compilers
+
+
+def _compiler_config_from_external(config):
+    spec = spack.spec.parse_with_version_concrete(config["spec"])
+    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
+    compiler_spec = spack.spec.CompilerSpec(
+        package_name_to_compiler_name.get(spec.name, spec.name), spec.version
+    )
+
+    extra_attributes = config.get("extra_attributes", {})
+    prefix = config.get("prefix", None)
+
+    compiler_class = class_for_compiler_name(compiler_spec.name)
+    paths = extra_attributes.get("paths", {})
+    compiler_langs = ["cc", "cxx", "fc", "f77"]
+    for lang in compiler_langs:
+        if paths.setdefault(lang, None):
+            continue
+
+        if not prefix:
+            continue
+
+        # Check for files that satisfy the naming scheme for this compiler
+        bindir = os.path.join(prefix, "bin")
+        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
+            if regex.match(f):
+                paths[lang] = os.path.join(bindir, f)
+
+    if all(v is None for v in paths.values()):
+        return None
+
+    if not spec.architecture:
+        host_platform = spack.platforms.host()
+        operating_system = host_platform.operating_system("default_os")
+        target = host_platform.target("default_target").microarchitecture
+    else:
+        target = spec.target
+        if not target:
+            host_platform = spack.platforms.host()
+            target = host_platform.target("default_target").microarchitecture
+
+        operating_system = spec.os
+        if not operating_system:
+            host_platform = spack.platforms.host()
+            operating_system = host_platform.operating_system("default_os")
+
+    compiler_entry = {
+        "compiler": {
+            "spec": str(compiler_spec),
+            "paths": paths,
+            "flags": extra_attributes.get("flags", {}),
+            "operating_system": str(operating_system),
+            "target": str(target.family),
+            "modules": config.get("modules", []),
+            "environment": extra_attributes.get("environment", {}),
+            "extra_rpaths": extra_attributes.get("extra_rpaths", []),
+            "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
+        }
+    }
+    return compiler_entry
+
+
+def _init_compiler_config(
+    configuration: "spack.config.Configuration", *, scope: Optional[str]
+) -> None:
     """Compiler search used when Spack has no compilers."""
     compilers = find_compilers()
     compilers_dict = []
     for compiler in compilers:
         compilers_dict.append(_to_dict(compiler))
-    spack.config.set("compilers", compilers_dict, scope=scope)
+    configuration.set("compilers", compilers_dict, scope=scope)


 def compiler_config_files():
@@ -142,17 +242,22 @@ def compiler_config_files():
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
             config_files.append(config.get_config_filename(name, "compilers"))
+        compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
+        if compiler_config_from_packages:
+            config_files.append(config.get_config_filename(name, "packages"))
     return config_files


-def add_compilers_to_config(compilers, scope=None, init_config=True):
+def add_compilers_to_config(compilers, scope=None):
     """Add compilers to the config for the specified architecture.

     Arguments:
         compilers: a list of Compiler objects.
         scope: configuration scope to modify.
     """
-    compiler_config = get_compiler_config(scope, init_config)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     for compiler in compilers:
         if not compiler.cc:
             tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -184,6 +289,9 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     for current_scope in candidate_scopes:
         removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)

+    msg = "`spack compiler remove` will not remove compilers defined in packages.yaml"
+    msg += "\nTo remove these compilers, either edit the config or use `spack external remove`"
+    tty.debug(msg)
     return removal_happened


@@ -198,7 +306,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
         True if one or more compiler entries were actually removed, False otherwise
     """
     assert scope is not None, "a specific scope is needed when calling this function"
-    compiler_config = get_compiler_config(scope)
+    compiler_config = get_compiler_config(
+        configuration=spack.config.CONFIG, scope=scope, init_config=False
+    )
     filtered_compiler_config = [
         compiler_entry
         for compiler_entry in compiler_config
@@ -213,22 +323,36 @@ def _remove_compiler_from_scope(compiler_spec, scope):
     # We need to preserve the YAML type for comments, hence we are copying the
     # items in the list that has just been retrieved
     compiler_config[:] = filtered_compiler_config
-    spack.config.set("compilers", compiler_config, scope=scope)
+    spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
     return True


-def all_compilers_config(scope=None, init_config=True):
+def all_compilers_config(
+    configuration: "spack.config.Configuration",
+    *,
+    scope: Optional[str] = None,
+    init_config: bool = True,
+) -> List["spack.compiler.Compiler"]:
     """Return a set of specs for all the compiler versions currently
     available to build with. These are instances of CompilerSpec.
     """
-    return get_compiler_config(scope, init_config)
+    from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
+    if from_packages_yaml:
+        init_config = False
+    from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)
+
+    result = from_compilers_yaml + from_packages_yaml
+    # Dedupe entries by the compiler they represent
+    # If the entry is invalid, treat it as unique for deduplication
+    key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
+    return list(llnl.util.lang.dedupe(result, key=key))


 def all_compiler_specs(scope=None, init_config=True):
     # Return compiler specs from the merged config.
     return [
         spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
-        for s in all_compilers_config(scope, init_config)
+        for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
     ]


@@ -388,11 +512,20 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


 def all_compilers(scope=None, init_config=True):
-    config = get_compiler_config(scope, init_config=init_config)
-    compilers = list()
-    for items in config:
+    return all_compilers_from(
+        configuration=spack.config.CONFIG, scope=scope, init_config=init_config
+    )
+
+
+def all_compilers_from(configuration, scope=None, init_config=True):
+    compilers = []
+    for items in all_compilers_config(
+        configuration=configuration, scope=scope, init_config=init_config
+    ):
         items = items["compiler"]
-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)  # can be None in error case
+        if compiler:
+            compilers.append(compiler)
     return compilers


@@ -403,10 +536,7 @@ def compilers_for_spec(
     """This gets all compilers that satisfy the supplied CompilerSpec.
     Returns an empty list if none are found.
     """
-    if use_cache:
-        config = all_compilers_config(scope, init_config)
-    else:
-        config = get_compiler_config(scope, init_config)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

     matches = set(find(compiler_spec, scope, init_config))
     compilers = []
@@ -416,7 +546,7 @@ def compilers_for_spec(


 def compilers_for_arch(arch_spec, scope=None):
-    config = all_compilers_config(scope)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope)
     return list(get_compilers(config, arch_spec=arch_spec))


@@ -502,7 +632,10 @@ def _compiler_from_config_entry(items):
     compiler = _compiler_cache.get(config_id, None)

     if compiler is None:
-        compiler = compiler_from_dict(items)
+        try:
+            compiler = compiler_from_dict(items)
+        except UnknownCompilerError as e:
+            warnings.warn(e.message)
         _compiler_cache[config_id] = compiler

     return compiler
@@ -555,7 +688,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
             raise ValueError(msg)
             continue

-        compilers.append(_compiler_from_config_entry(items))
+        compiler = _compiler_from_config_entry(items)
+        if compiler:
+            compilers.append(compiler)

     return compilers


@@ -583,9 +718,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):

     scope_to_compilers = {}
     for scope in config.scopes:
-        compilers = compilers_for_spec(
-            compiler_spec, arch_spec=arch_spec, scope=scope, use_cache=False
-        )
+        compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
         if compilers:
             scope_to_compilers[scope] = compilers
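With this change, compilers declared as externals in packages.yaml become first-class alongside compilers.yaml entries. A sketch of reading them through the new, configuration-explicit API (the scope name is an assumption for illustration):

```python
import spack.compilers
import spack.config

cfg = spack.config.CONFIG

# Entries derived from packages.yaml externals (may be an empty list).
from_packages = spack.compilers.get_compiler_config_from_packages(cfg, scope="site")

# Merged view: compilers.yaml plus packages.yaml, deduplicated per compiler.
compilers = spack.compilers.all_compilers_from(configuration=cfg)
```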
@@ -1,34 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import os
-
-import spack.compilers.oneapi
-
-
-class Dpcpp(spack.compilers.oneapi.Oneapi):
-    """This is the same as the oneAPI compiler but uses dpcpp instead of
-    icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
-    CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
-    detect it as dpcpp.
-
-    Ideally we could switch out icpx for dpcpp where needed in the oneAPI
-    compiler definition, but two things are needed for that: (a) a way to
-    tell the compiler that it should be using dpcpp and (b) a way to
-    customize the link_paths
-
-    See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
-    """
-
-    # Subclasses use possible names of C++ compiler
-    cxx_names = ["dpcpp"]
-
-    # Named wrapper links within build_env_path
-    link_paths = {
-        "cc": os.path.join("oneapi", "icx"),
-        "cxx": os.path.join("oneapi", "dpcpp"),
-        "f77": os.path.join("oneapi", "ifx"),
-        "fc": os.path.join("oneapi", "ifx"),
-    }
@@ -8,7 +8,9 @@
import subprocess
import sys
import tempfile
-from typing import Dict, List, Set
+from typing import Dict, List

+import archspec.cpu
+
import spack.compiler
import spack.operating_systems.windows_os
@@ -18,15 +20,7 @@
from spack.error import SpackError
from spack.version import Version, VersionRange

-avail_fc_version: Set[str] = set()
-fc_path: Dict[str, str] = dict()
-
-fortran_mapping = {
-    "2021.3.0": "19.29.30133",
-    "2021.2.1": "19.28.29913",
-    "2021.2.0": "19.28.29334",
-    "2021.1.0": "19.28.29333",
-}
+FC_PATH: Dict[str, str] = dict()


class CmdCall:
@@ -113,15 +107,13 @@ def command_str(self):
        return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"


-def get_valid_fortran_pth(comp_ver):
-    cl_ver = str(comp_ver)
+def get_valid_fortran_pth():
+    """Assign maximum available fortran compiler version"""
+    # TODO (johnwparent): validate compatibility w/ try compiler
+    # functionality when added
    sort_fn = lambda fc_ver: Version(fc_ver)
-    sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
-    for ver in sort_fc_ver:
-        if ver in fortran_mapping:
-            if Version(cl_ver) <= Version(fortran_mapping[ver]):
-                return fc_path[ver]
-    return None
+    sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
+    return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None


class Msvc(Compiler):
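Why the Version-aware sort matters: a plain string sort would rank "2021.2.1" above "2021.10.0". A standalone sketch of the max-version selection above, with a tuple key standing in for spack.version.Version and hypothetical install paths:

from typing import Dict, Optional

FC_PATH: Dict[str, str] = {
    "2021.2.1": r"C:\oneAPI\2021.2.1\ifx.exe",   # hypothetical paths
    "2021.10.0": r"C:\oneAPI\2021.10.0\ifx.exe",
}

def latest_fc_path() -> Optional[str]:
    # Compare versions numerically, component by component
    version_key = lambda v: tuple(int(part) for part in v.split("."))
    ordered = sorted(FC_PATH.keys(), key=version_key)
    return FC_PATH[ordered[-1]] if ordered else None

print(latest_fc_path())  # numeric ordering picks 2021.10.0, not 2021.2.1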
@@ -165,11 +157,9 @@ def __init__(self, *args, **kwargs):
        # This positional argument "paths" is later parsed and processed by the base class
        # via the call to `super` later in this method
        paths = args[3]
-        # This positional argument "cspec" is also parsed and handled by the base class
-        # constructor
-        cspec = args[0]
-        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
-        paths[:] = new_pth
+        latest_fc = get_valid_fortran_pth()
+        new_pth = [pth if pth else latest_fc for pth in paths[2:]]
+        paths[2:] = new_pth
        # Initialize, deferring to base class but then adding the vcvarsallfile
        # file based on compiler executable path.
        super().__init__(*args, **kwargs)
@@ -181,11 +171,14 @@ def __init__(self, *args, **kwargs):
        # and stores their path, but their respective VCVARS
        # file must be invoked before usage.
        env_cmds = []
-        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
        vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
        # get current platform architecture and format for vcvars argument
        arch = spack.platforms.real_host().default.lower()
        arch = arch.replace("-", "_")
+        if str(archspec.cpu.host().family) == "x86_64":
+            arch = "amd64"

        self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
        env_cmds.append(self.vcvars_call)
        # Below is a check for a valid fortran path
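A small runnable sketch of the architecture selection above (assuming archspec is installed): the host CPU family decides the argument handed to vcvars64.bat.

import archspec.cpu

arch = str(archspec.cpu.host().family)
# vcvars spells x86_64 as "amd64"; other families keep their name,
# with dashes converted to underscores as in the diff above
vcvars_arch = "amd64" if arch == "x86_64" else arch.replace("-", "_")
print(vcvars_arch)  # "amd64" on any x86_64 machine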
@@ -193,11 +186,34 @@ def __init__(self, *args, **kwargs):
        # paths[2] refers to the fc path and is a generic check
        # for a fortran compiler
        if paths[2]:
+
+            def get_oneapi_root(pth: str):
+                """From within a prefix known to be a oneAPI path
+                determine the oneAPI root path from arbitrary point
+                under root
+
+                Args:
+                    pth: path prefixed within oneAPI root
+                """
+                if not pth:
+                    return ""
+                while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
+                    pth = os.path.dirname(pth)
+                return pth
+
            # If this is found, it sets all the vars
-            oneapi_root = os.getenv("ONEAPI_ROOT")
+            oneapi_root = get_oneapi_root(self.fc)
+            if not oneapi_root:
+                raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            # some oneAPI exes return a version more precise than their
+            # install paths specify, so we determine path from
+            # the install path rather than the fc executable itself
+            numver = r"\d+\.\d+(?:\.\d+)?"
+            pattern = f"((?:{numver})|(?:latest))"
+            version_from_path = re.search(pattern, self.fc).group(1)
            oneapi_version_setvars = os.path.join(
-                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+                oneapi_root, "compiler", version_from_path, "env", "vars.bat"
            )
            # order matters here, the specific version env must be invoked first,
            # otherwise it will be ignored if the root setvars sets up the oneapi
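A self-contained sketch of get_oneapi_root's walk up the directory tree (ntpath is used so the Windows path semantics hold on any host; the compiler path below is hypothetical):

import ntpath

def get_oneapi_root(pth: str) -> str:
    # Walk parent directories until the component named "oneAPI" is reached
    if not pth:
        return ""
    while ntpath.basename(pth) and ntpath.basename(pth) != "oneAPI":
        pth = ntpath.dirname(pth)
    return pth

fc = r"C:\Program Files (x86)\Intel\oneAPI\compiler\2023.1.0\windows\bin\ifx.exe"
print(get_oneapi_root(fc))  # C:\Program Files (x86)\Intel\oneAPI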
@@ -309,23 +325,19 @@ def setup_custom_environment(self, pkg, env):

    @classmethod
    def fc_version(cls, fc):
-        # We're using intel for the Fortran compilers, which exist if
-        # ONEAPI_ROOT is a meaningful variable
        if not sys.platform == "win32":
            return "unknown"
        fc_ver = cls.default_version(fc)
-        avail_fc_version.add(fc_ver)
-        fc_path[fc_ver] = fc
-        if os.getenv("ONEAPI_ROOT"):
-            try:
-                sps = spack.operating_systems.windows_os.WindowsOs.compiler_search_paths
-            except AttributeError:
-                raise SpackError("Windows compiler search paths not established")
-            clp = spack.util.executable.which_string("cl", path=sps)
-            ver = cls.default_version(clp)
-        else:
-            ver = fc_ver
-        return ver
+        FC_PATH[fc_ver] = fc
+        try:
+            sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
+        except AttributeError:
+            raise SpackError(
+                "Windows compiler search paths not established, "
+                "please report this behavior to github.com/spack/spack"
+            )
+        clp = spack.util.executable.which_string("cl", path=sps)
+        return cls.default_version(clp) if clp else fc_ver

    @classmethod
    def f77_version(cls, f77):
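The essence of the new fc_version logic is a fallback chain: prefer the version of cl found on the curated search paths, otherwise report the Fortran compiler's own version. A hedged sketch with a stubbed version probe (all names here are illustrative):

import os
import shutil
from typing import List, Optional

def default_version(exe: str) -> str:
    # Stub for Compiler.default_version, which would run the executable
    # and parse its version output (hypothetical fixed value here).
    return "19.29.30133"

def resolve_cl_version(search_paths: List[str], fc_version: str) -> str:
    # Prefer cl from the established search paths; fall back to the
    # Fortran compiler's version when cl is not found.
    clp: Optional[str] = shutil.which("cl", path=os.pathsep.join(search_paths))
    return default_version(clp) if clp else fc_version

print(resolve_cl_version([r"C:\no\such\dir"], "2021.3.0"))  # -> "2021.3.0"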
@@ -749,7 +749,6 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
    result = solver.solve(
        abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
    )
    result.raise_if_unsat()
    return [s.copy() for s in result.specs]

-
@@ -107,7 +107,7 @@

#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
-SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user,command_line}[/PLATFORM] or env:ENVIRONMENT"

#: Base name for the (internal) overrides scope.
_OVERRIDES_BASE_NAME = "overrides-"
@@ -764,6 +764,31 @@ def _add_platform_scope(
    cfg.push_scope(scope_type(plat_name, plat_path))


+def config_paths_from_entry_points() -> List[Tuple[str, str]]:
+    """Load configuration paths from entry points
+
+    A python package can register entry point metadata so that Spack can find
+    its configuration by adding the following to the project's pyproject.toml:
+
+    .. code-block:: toml
+
+       [project.entry-points."spack.config"]
+       baz = "baz:get_spack_config_path"
+
+    The function ``get_spack_config_path`` returns the path to the package's
+    spack configuration scope
+
+    """
+    config_paths: List[Tuple[str, str]] = []
+    for entry_point in lang.get_entry_points(group="spack.config"):
+        hook = entry_point.load()
+        if callable(hook):
+            config_path = hook()
+            if config_path and os.path.exists(config_path):
+                config_paths.append(("plugin-%s" % entry_point.name, str(config_path)))
+    return config_paths
+
+
def _add_command_line_scopes(
    cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
) -> None:
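For the plugin side of this mechanism, a hypothetical package baz could pair the pyproject.toml stanza from the docstring with a module like the following (the scope directory name is an assumption):

# baz.py -- hypothetical module registered via:
#   [project.entry-points."spack.config"]
#   baz = "baz:get_spack_config_path"
import os

def get_spack_config_path() -> str:
    # Return the directory holding this package's Spack config scope;
    # Spack would push it as a scope named "plugin-baz".
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), "spack_config")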
@@ -816,6 +841,9 @@ def create() -> Configuration:
    # No site-level configs should be checked into spack by default.
    configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))

+    # Python packages can register configuration scopes via entry_points
+    configuration_paths.extend(config_paths_from_entry_points())
+
    # User configuration can override both spack defaults and site config
    # This is disabled if user asks for no local configuration.
    if not disable_local_config:
@@ -1534,8 +1562,9 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
def use_configuration(
    *scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
-    """Use the configuration scopes passed as arguments within the
-    context manager.
+    """Use the configuration scopes passed as arguments within the context manager.
+
+    This function invalidates caches, and is therefore very slow.

    Args:
        *scopes_or_paths: scope objects or paths to be used
@@ -19,9 +19,6 @@
        },
        "os_package_manager": "dnf",
        "build": "spack/fedora38",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "docker.io/fedora:38"
        }
@@ -33,9 +30,6 @@
        },
        "os_package_manager": "dnf",
        "build": "spack/fedora37",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "docker.io/fedora:37"
        }
@@ -47,9 +41,6 @@
        },
        "os_package_manager": "dnf_epel",
        "build": "spack/rockylinux9",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "docker.io/rockylinux:9"
        }
@@ -61,9 +52,6 @@
        },
        "os_package_manager": "dnf_epel",
        "build": "spack/rockylinux8",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "docker.io/rockylinux:8"
        }
@@ -75,9 +63,6 @@
        },
        "os_package_manager": "dnf_epel",
        "build": "spack/almalinux9",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "quay.io/almalinuxorg/almalinux:9"
        }
@@ -89,9 +74,6 @@
        },
        "os_package_manager": "dnf_epel",
        "build": "spack/almalinux8",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "quay.io/almalinuxorg/almalinux:8"
        }
@@ -105,9 +87,6 @@
        "build": "spack/centos-stream",
        "final": {
            "image": "quay.io/centos/centos:stream"
        },
-        "build_tags": {
-            "develop": "latest"
-        }
    },
    "centos:7": {
@@ -115,10 +94,7 @@
            "template": "container/centos_7.dockerfile"
        },
        "os_package_manager": "yum",
-        "build": "spack/centos7",
-        "build_tags": {
-            "develop": "latest"
-        }
+        "build": "spack/centos7"
    },
    "opensuse/leap:15": {
        "bootstrap": {
@@ -126,9 +102,6 @@
        },
        "os_package_manager": "zypper",
        "build": "spack/leap15",
-        "build_tags": {
-            "develop": "latest"
-        },
        "final": {
            "image": "opensuse/leap:latest"
        }
@@ -148,19 +121,13 @@
            "template": "container/ubuntu_2204.dockerfile"
        },
        "os_package_manager": "apt",
-        "build": "spack/ubuntu-jammy",
-        "build_tags": {
-            "develop": "latest"
-        }
+        "build": "spack/ubuntu-jammy"
    },
    "ubuntu:20.04": {
        "bootstrap": {
            "template": "container/ubuntu_2004.dockerfile"
        },
        "build": "spack/ubuntu-focal",
-        "build_tags": {
-            "develop": "latest"
-        },
        "os_package_manager": "apt"
    },
    "ubuntu:18.04": {
@@ -168,10 +135,7 @@
            "template": "container/ubuntu_1804.dockerfile"
        },
        "os_package_manager": "apt",
-        "build": "spack/ubuntu-bionic",
-        "build_tags": {
-            "develop": "latest"
-        }
+        "build": "spack/ubuntu-bionic"
    }
},
"os_package_managers": {
@@ -50,10 +50,7 @@ def build_info(image, spack_version):
    if not build_image:
        return None, None

-    # Translate version from git to docker if necessary
-    build_tag = image_data["build_tags"].get(spack_version, spack_version)
-
-    return build_image, build_tag
+    return build_image, spack_version


def os_package_manager_for(image):
@@ -227,7 +227,7 @@ def read(path, apply_updates):
    if apply_updates and compilers:
        for compiler in compilers:
            try:
-                spack.compilers.add_compilers_to_config([compiler], init_config=False)
+                spack.compilers.add_compilers_to_config([compiler])
            except Exception:
                warnings.warn(
                    f"Could not add compiler {str(compiler.spec)}: "
@@ -1621,15 +1621,32 @@ def query_local(self, *args, **kwargs):
    query_local.__doc__ += _QUERY_DOCSTRING

    def query(self, *args, **kwargs):
-        """Query the Spack database including all upstream databases."""
+        """Query the Spack database including all upstream databases.
+
+        Additional Arguments:
+            install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
+        """
+        install_tree = kwargs.pop("install_tree", "all")
+        valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
+        if install_tree not in valid_trees:
+            msg = "Invalid install_tree argument to Database.query()\n"
+            msg += f"Try one of {', '.join(valid_trees)}"
+            tty.error(msg)
+            return []
+
        upstream_results = []
-        for upstream_db in self.upstream_dbs:
+        upstreams = self.upstream_dbs
+        if install_tree not in ("all", "upstream"):
+            upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
+        for upstream_db in upstreams:
            # queries for upstream DBs need to *not* lock - we may not
            # have permissions to do this and the upstream DBs won't know about
            # us anyway (so e.g. they should never uninstall specs)
            upstream_results.extend(upstream_db._query(*args, **kwargs) or [])

-        local_results = set(self.query_local(*args, **kwargs))
+        local_results = []
+        if install_tree in ("all", "local") or self.root == install_tree:
+            local_results = set(self.query_local(*args, **kwargs))

        results = list(local_results) + list(x for x in upstream_results if x not in local_results)
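A hedged usage sketch of the new keyword (db is assumed to be an existing spack.database.Database instance):

def query_examples(db):
    return [
        db.query("zlib"),                           # all databases (default)
        db.query("zlib", install_tree="local"),     # this store only
        db.query("zlib", install_tree="upstream"),  # upstream databases only
    ]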
@@ -9,8 +9,6 @@
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

-import jinja2
-
from llnl.util import filesystem

import spack.repo
@@ -85,6 +83,8 @@ def _mock_layout(self) -> Generator[List[str], None, None]:
        self.tmpdir.cleanup()

    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
+        import jinja2
+
        relative_paths = mock_executables.executables
        script = mock_executables.script
        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
Some files were not shown because too many files have changed in this diff.