Compare commits: `features/i...packages/m` (543 commits)
Commits in this range, by SHA1:

```text
314fcc15f4 3b4ec8ced4 c9ab0d8fcb c45e02d58f 33c8f518ae 2491a9abff 1a26ec7b8b 89a79d3df0
ce700d69d7 a505fb1f37 f039b22093 18ea8f813e d7e740defa c21dc1a27a f30d8ea2a5 03cb30cb96
f6da037129 31c2897fd8 1a379215da 0f7c1b5e38 7e3af5d42d f45e312f81 a82e21e82f 1ba40b99ee
60f2698a4a b3772f8bb6 cd75e52ba2 b0b316c646 7bbf581169 7b93d01a68 a95fa26857 6f2393a345
9fa2bb375c 98c08ce5c6 83f115894b 59339be48f ef0599b53c 9c4207a551 eb95390ce7 527d723db0
63fe6fc893 4f2a1806f9 12a7e8d73a 21d8c09c5e 43596b4e23 97edcb5acc fc268b0945 0b4477c0df
eff4c14a09 f485a622c8 f151bc65f7 99d849b2e6 3d8f9a7b22 c88e7bc492 931d034da4 a3a49daf8f
2c05ce3607 6587b2a231 f1c743e235 a93b5fd4ae f8823ec3c9 4e068ab7e0 b932c14008 285f95a4d8
3de68ef976 5c7fe24bec ecb122f4c1 6219780691 8ec1369d2b e3fcc41162 ae582c45c3 252a4d1076
df37a8ba76 99d06b95a3 38829b01df 2a6a6602da 1527e9703d 4a22df5477 40b40ae1a8 2b4f2daa73
02501bc4af 7cd039d022 1ff81c1c88 3e3cb73446 8e948c03fc 572e790b3d 1873d6909a 4a24ab53df
671c394d32 ce3b511f59 fbd97b0556 d145ca6da4 03073a5fed 787bff0d6a 2504a76079 f665f4c41b
4cab31323c fcbe8c50cd 37de90c98c 5ccd9dc64b 1f59ada2c2 0d51440648 1825557241 1c0c73a404
a8a402115b c2f3539a5e cdeb67ec02 2ddd8cd1aa 5b352c3088 95c26245c1 6a0e03b81c 858f70bf6f
123c26c22d b42ef1e7b8 2f2c65f56b 883d0739e6 f1a31fe5f7 c3785f4d30 cc8983cf82 30cea3ce8a
1252bd975c 6547758b2f c633149874 d640ce74e0 6d2cc2d27a 43f180c2c5 0685c6277e eaabde6ee9
87505fc2fc d7d886e3b5 0b3bd1e294 b9b7450f60 a6b0dfbd53 ecc3752ee9 8e2caa2b83 25af7a36aa
38daed0a78 fc3b732b14 382847976f c1b423849c 45ea09a79f b96af088d1 d47478d7b6 4763581642
d264094fdc 3c8c7ef341 f83beb09ba 3604e5bffc 7fba228cf3 1d379d96ab f3edc33a07 8d4ea9dbd3
742d313ba8 70407e8970 2d42675035 4c50915d81 3f8d5fed39 66c1c213b1 f46528ec6b 41489efa4c
3df5a85237 8921612f6a e6a0a6c145 104d6b4484 cba9436cf4 9dc3ad4db7 4bfd7aeb25 fcf615b53e
1155318534 6343708620 a3c430e810 41ff0500f9 059a4a58e2 14513ba76f 21da90e062 a3c7e97463
f7ed3ce4ae 36caa6158a 1904d99fd0 de0b17c07f 5d695623db 3f063ace1d 47b71ba8ca 67eb9cfccb
dddbd944a4 b7d85e7694 f4c4b06a46 6995010bab 2d212561fb 7cab3e2383 48ca9a5f3c 1934c8cf73
42cd7c4f89 ce654b6882 94719a55b4 76168292c3 3fd6066e54 c62cc6a45d 423548fc90 9010e6f556
6085586407 dbd745bdab 31c5c0b423 41f99f8131 441ade5809 60f6f8d836 5e7925c502 d39382bec8
be270f2311 c500200952 71b110e6c7 7b877ec9e2 a74ac87d34 796adb6b9b 2967bb5540 9f4c677e46
1d369ba02d dcde4f9d5a 3c576ca8c2 a89c89a23e aee7455568 69edcc6d2f 46263a493e b24f2875e6
18eebce04d f5c6e10e08 e7c17f7ed8 a284cbf256 8cbf067455 875397cf16 a38045f77e 31ce23f3fc
9e65bd5837 2c1a3eff74 1d81ceb101 044c37372a 8f40889a46 0a0282163b 54f4530df4 193f3b3c5a
34b0e8ebce 10109bf128 f0a7388496 45bc8fd2a3 ca82085c82 b97fbcb970 cf812dd3a9 e78d9d93dd
be492e1ed7 fcfbc28e10 8fb5898d10 b75e35289c 4024200d61 eab1d6df80 0d7c0c8362 a573f2248d
986102ab7a 04f6881b76 d4582945ba a0940510df c2327a2adf 4c075801db 1d27add307 3256ad8e5c
dc8678136c 62ec0f6d33 25d8e95ad4 883bbf3826 1dc9bac745 8ac5398576 dbd3895cbf 1d70dc8292
4b2a96fe06 4a7508c9df 4f27ef8157 069010fe13 0fa64f9791 8d23edd1a9 336d33ecfa caaf0c50f6
6b2cd0ca45 0bec90ecd7 1f77b33255 6dab20e8f8 f926512cd4 4a08f5b6e4 2cbc21d584 b4646c340c
24efb56528 19f7a1bfbd bc5b57dca9 ebef5f75fb e45ee9cb92 900fff77cd e97a78ebcc 25beeef865
b3ded1332e b66694d1ca ebb2bb206e 7a489e1e4e 940f47a47c ccea1c6c9b 476863c4e8 7794d51adb
0a6767e602 b3585ff1b8 7e9c24a789 c5b227d14c 620d5c7ef8 74f78bd24f fa9dcb43bd 9a37a6fcb1
4ae4739537 493a307e4f 6fb5a1492a cc3d40d9d3 8fc1ccc686 8a8d88aab9 246ac7ced9 fd1b982073
fd31f7e014 e70d7d4eb7 b4b35f9efd aa05af81d0 3da44cff0b 95de0c021b 9347769d4b 8885f6b861
c5e5ed3a3b 23d7305efd 252ceeedbe 6df832d979 4a88884a8e 84dcc654ec b6722ce5c9 1cbee69bec
1cf311f217 c960fa0996 69a95bf1f8 513142f154 d6b6910654 ae78c7698a f4f1606298 d2dd4e96d9
4cb64e150f b74e23a637 8ffd6c29bf 4372907fc1 63a506ed17 382647c8af 4b73da5bb2 17b47c9dbe
5075275873 2acdacb129 bedc7bd518 dd8d2a2515 129338c4c9 e7b009e350 9e6e478ccf 357089f347
6228247eda a919b67cb4 58ac6f7cba 86b57c233d b6dec56f4f d403060cf2 7d0dd27363 cfbc92c2f0
e7bca5b8f6 cf5ba8aee3 32a4eb4ebb a3f4fd68d6 95f8c7e073 20f31ce39d 91ef8c056b c6ce7637fc
770c6cc612 97bad2f5a7 8ca82fb2b6 36540708f1 7e027cae3e a311d0a8c0 cd8ebdcfbd 8b3bfbd95e
10afe49877 2afbeded25 415055d303 081e4c463b e5ec08771b 98605621e7 625a4b854c dcf2c8744a
d1b7cc9b5e 8fbe1ad941 440ae973d1 2cb140f9a8 fb2cca4e1e 03b0d299f9 8f93ea80fd 5cd5fcdd7f
da760a898e dd55635fae 320c758fea ff82ba24e9 d00b05b71e f8524f9d5e 924204828e 2f4c5f2aa2
7e6a216d33 87bbcefba9 8ab1011192 b2f8cd22c3 e6e58423aa 5255af3981 5d913d0708 5fda19194b
3ea92b1983 522fa9dc62 65ec330af5 72c1d0033f 6bfe83106d d37e2c600c 276f77835c b7f695e8b6
db9630e9e0 136a658746 f0bfc7d898 03ebb82752 82d808d58d 43fa93c8e1 aa70cb34e1 9acb70f204
8296788730 31a8dc6f6c b1ac661ba8 63b437ddf9 578675cec8 751c79872f 22f26eec68 4ec2016f56
5c71d36330 035096006e 1f797208bc aa41fe05ff f4792c834e 98ca90aebc 991f26d1ae 973a7e6de8
528ba74965 73034c163b 62ee56e8a3 01471aee6b c004c8b616 0facab231f ca64050f6a 91b3afac88
0327ba1dfe 67f091f0d9 c09759353f 14d72d2703 288298bd2c 964f81d3c2 93220f706e fe9275a5d4
0fa829ae77 451db85657 ce1c2b0f05 88c1eae5d4 4af3bc47a2 a257747cba ed2ddec715 748c7e5420
cf70d71ba8 3913c24c19 032a0dba90 d4a8602577 1ce5ecfbd7 bf6ea7b047 49a17de751 95927df455
0f64f1baec 46f7737626 7256508983 75a3d179b1 72b14de89e de6eaa1b4e 0f4bfda2a1 6c78d9cab2
7970a04025 22c38e5975 3c4cb0d4f3 61b9e8779b 2a25e2b572 7db5b1d5d6 10f309273a 3e99a12ea2
d5f5d48bb3 6f2019ece9 35a84f02fa f6123ee160 4b02ecddf4 bd39598e61 df9cac172e 511c2750c7
b7a81426b0 654b294641 c19a90b74a 03d70feb18 bb1216432a d27aab721a 3444d40ae2 37f2683d17
3c4f23f64a db8e56b0a5 ff6dfea9b9 2f3ef790e2 01db307f41 d715b725fa 52a995a95c b87c025cd3
360eb4278c 7b3fc7dee3 f0acbe4310 f0c676d14a 13dd198a09 3f6c66d701 dcd6e61f34 ac7b467897
c5adb934c2 1e70a8d6ad 755a4054b2 040d747a86 9440894173 4e42e3c2ec 662bf113e2 7e11fd62e2
a6c22f2690 4894668ece 199133fca4 ea3a3b51a0 23bd3e6104 c72477e67a 2d2a4d1908
```
**.flake8** (2 changes)

```diff
@@ -28,7 +28,7 @@ max-line-length = 99
 # - F821: undefined name `name`
 #
 per-file-ignores =
-    var/spack/repos/*/package.py:F403,F405,F821
+    var/spack/*/package.py:F403,F405,F821
     *-ci-package.py:F403,F405,F821

 # exclude things we usually do not want linting for.
```
**.gitattributes** (vendored, 3 changes)

```diff
@@ -1,4 +1,3 @@
 *.py diff=python
 *.lp linguist-language=Prolog
-lib/spack/external/* linguist-vendored
-*.bat text eol=crlf
+*.bat text eol=crlf
```
**.github/workflows/audit.yaml** (vendored, 1 change)

```diff
@@ -59,7 +59,6 @@ jobs:
       - name: Package audits (without coverage)
         if: ${{ runner.os == 'Windows' }}
         run: |
-          . share/spack/setup-env.sh
           spack -d audit packages
           ./share/spack/qa/validate_last_exit.ps1
           spack -d audit configs
```
**.github/workflows/bootstrap.yml** (vendored, 2 changes)

```diff
@@ -26,7 +26,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
-            cmake bison bison-devel libstdc++-static
+            cmake bison bison-devel libstdc++-static gawk
       - name: Setup OpenSUSE
         if: ${{ matrix.image == 'opensuse/leap:latest' }}
         run: |
```
**.github/workflows/ci.yaml** (vendored, 22 changes)

```diff
@@ -42,17 +42,17 @@ jobs:
           # built-in repository or documentation
           filters: |
             bootstrap:
-              - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
-              - 'var/spack/repos/builtin/packages/clingo/**'
-              - 'var/spack/repos/builtin/packages/python/**'
-              - 'var/spack/repos/builtin/packages/re2c/**'
-              - 'var/spack/repos/builtin/packages/gnupg/**'
-              - 'var/spack/repos/builtin/packages/libassuan/**'
-              - 'var/spack/repos/builtin/packages/libgcrypt/**'
-              - 'var/spack/repos/builtin/packages/libgpg-error/**'
-              - 'var/spack/repos/builtin/packages/libksba/**'
-              - 'var/spack/repos/builtin/packages/npth/**'
-              - 'var/spack/repos/builtin/packages/pinentry/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/python/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/npth/**'
+              - 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
               - 'lib/spack/**'
               - 'share/spack/**'
               - '.github/workflows/bootstrap.yml'
```
**.github/workflows/prechecks.yml** (vendored, 45 changes)

```diff
@@ -25,14 +25,16 @@ jobs:
         with:
           python-version: '3.13'
           cache: 'pip'
           cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
       - name: Install Python Packages
         run: |
           pip install --upgrade pip setuptools
           pip install -r .github/workflows/requirements/style/requirements.txt
       - name: vermin (Spack's Core)
-        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+        run: |
+          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)
-        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+        run: |
+          vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos

   # Run style checks on the files that have been changed
   style:
@@ -40,23 +42,20 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
-          fetch-depth: 0
+          fetch-depth: 2
      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
        with:
          python-version: '3.13'
          cache: 'pip'
          cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools
          pip install -r .github/workflows/requirements/style/requirements.txt
-     - name: Setup git configuration
-       run: |
-         # Need this for the git tests to succeed.
-         git --version
-         . .github/workflows/bin/setup_git.sh
      - name: Run style tests
        run: |
-         share/spack/qa/run-style-tests
+         bin/spack style --base HEAD^1
          bin/spack license verify
+         pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

   audit:
     uses: ./.github/workflows/audit.yaml
@@ -66,7 +65,11 @@ jobs:
       python_version: '3.13'

   verify-checksums:
-    if: ${{ inputs.with_packages == 'true' }}
+    # do not run if the commit message or PR description contains [skip-verify-checksums]
+    if: >-
+      ${{ inputs.with_packages == 'true' &&
+          !contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
+          !contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
@@ -103,21 +106,3 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
-
-  # Further style checks from pylint
-  pylint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: '3.13'
-          cache: 'pip'
-      - name: Install Python packages
-        run: |
-          pip install --upgrade pip setuptools pylint
-      - name: Pylint (Spack Core)
-        run: |
-          pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
```
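The reworked ``verify-checksums`` condition above means the job can now be skipped on demand from either the PR description or the head commit message. For example (the package and message here are hypothetical):

```console
$ git commit -m "py-foo: add v1.2.3 [skip-verify-checksums]"
```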
```diff
@@ -1,7 +1,8 @@
 black==25.1.0
-clingo==5.7.1
-flake8==7.1.2
+clingo==5.8.0
+flake8==7.2.0
 isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20250304
+types-six==1.17.0.20250403
 vermin==1.6.0
+pylint==3.3.7
```
**.github/workflows/sync-packages.yaml** (vendored, new file, 34 additions)

```diff
@@ -0,0 +1,34 @@
+name: sync with spack/spack-packages
+
+on:
+  push:
+    branches:
+      - develop
+
+jobs:
+  sync:
+    if: github.repository == 'spack/spack'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout spack/spack
+        run: git clone https://github.com/spack/spack.git
+      - name: Checkout spack/spack-packages
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+        with:
+          ssh-key: ${{ secrets.SYNC_PACKAGES_KEY }}
+          path: spack-packages
+          repository: spack/spack-packages
+      - name: Install git-filter-repo
+        run: |
+          curl -LfsO https://raw.githubusercontent.com/newren/git-filter-repo/refs/tags/v2.47.0/git-filter-repo
+          echo "67447413e273fc76809289111748870b6f6072f08b17efe94863a92d810b7d94  git-filter-repo" | sha256sum -c -
+          chmod +x git-filter-repo
+          sudo mv git-filter-repo /usr/local/bin/
+      - name: Sync spack/spack-packages with spack/spack
+        run: |
+          cd spack-packages
+          git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop
+      - name: Push
+        run: |
+          cd spack-packages
+          git push git@github.com:spack/spack-packages.git develop:develop --force
```
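The workhorse in this job is `git-filter-repo --subdirectory-filter`, which rewrites history so that `var/spack/repos` becomes the repository root and commits that never touched that subtree are dropped. A rough local sketch of the same operation, run in a scratch directory and assuming `git-filter-repo` is on `PATH`:

```sh
git clone https://github.com/spack/spack.git
git clone https://github.com/spack/spack-packages.git
cd spack-packages
# Rewrite spack's develop history so that var/spack/repos becomes the root
# of this repository; the workflow then force-pushes the result.
git-filter-repo --source ../spack --subdirectory-filter var/spack/repos --refs develop
```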
**.github/workflows/unit_tests.yaml** (vendored, 3 changes)

```diff
@@ -19,9 +19,6 @@ jobs:
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
-        - python-version: '3.6'
-          os: ubuntu-20.04
-          on_develop: ${{ github.ref == 'refs/heads/develop' }}
         - python-version: '3.7'
           os: ubuntu-22.04
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
```
**README.md** (34 changes)

````diff
@@ -46,18 +46,42 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-To install spack and your first package, make sure you have Python & Git.
+Installation
+----------------
+
+To install spack, first make sure you have Python & Git.
 Then:

-    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
-    $ cd spack/bin
-    $ ./spack install zlib
+```bash
+git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+```
+
+<details>
+<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
+<br>
+
+> [!TIP]
+> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
+>
+> `--depth=2` prunes the git history to reduce the size of the Spack installation.
+
+</details>
+
+```bash
+# For bash/zsh/sh
+. spack/share/spack/setup-env.sh
+
+# For tcsh/csh
+source spack/share/spack/setup-env.csh
+
+# For fish
+. spack/share/spack/setup-env.fish
+```
+
+```bash
+# Now you're ready to install a package!
+spack install zlib-ng
+```

 Documentation
 ----------------
````
```diff
@@ -90,10 +90,9 @@ config:
   misc_cache: $user_cache_path/cache


-  # Timeout in seconds used for downloading sources etc. This only applies
-  # to the connection phase and can be increased for slow connections or
-  # servers. 0 means no timeout.
-  connect_timeout: 10
+  # Abort downloads after this many seconds if not data is received.
+  # Setting this to 0 will disable the timeout.
+  connect_timeout: 30


   # If this is false, tools like curl that use SSL will not verify
```
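Users who need more headroom than the new 30-second default can override it in their own configuration scope; a minimal sketch, assuming the usual user-scope file location:

```yaml
# ~/.spack/config.yaml
config:
  # Give slow mirrors more time before aborting a download.
  connect_timeout: 120
```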
```diff
@@ -25,6 +25,8 @@ packages:
     glu: [apple-glu]
     unwind: [apple-libunwind]
     uuid: [apple-libuuid]
+  apple-clang:
+    buildable: false
   apple-gl:
     buildable: false
     externals:
```
```diff
@@ -72,6 +72,8 @@ packages:
     permissions:
       read: world
       write: user
+  cce:
+    buildable: false
   cray-fftw:
     buildable: false
   cray-libsci:
@@ -86,13 +88,23 @@ packages:
     buildable: false
   essl:
     buildable: false
+  fj:
+    buildable: false
+  fujitsu-mpi:
+    buildable: false
+  fujitsu-ssl2:
+    buildable: false
+  glibc:
+    buildable: false
+  hpcx-mpi:
+    buildable: false
   iconv:
     prefer: [libiconv]
   mpt:
     buildable: false
   musl:
     buildable: false
   spectrum-mpi:
     buildable: false
   xl:
     buildable: false
```
```diff
@@ -11,4 +11,4 @@
 # ~/.spack/repos.yaml
 # -------------------------------------------------------------------------
 repos:
-  - $spack/var/spack/repos/builtin
+  - $spack/var/spack/repos/spack_repo/builtin
```
```diff
@@ -20,3 +20,8 @@ packages:
     cxx: [msvc]
     mpi: [msmpi]
     gl: [wgl]
+  mpi:
+    require:
+      - one_of: [msmpi]
+  msvc:
+    buildable: false
```
```diff
@@ -1291,55 +1291,61 @@ based on site policies.
 Variants
 ^^^^^^^^

-Variants are named options associated with a particular package. They are
-optional, as each package must provide default values for each variant it
-makes available. Variants can be specified using
-a flexible parameter syntax ``name=<value>``. For example,
-``spack install mercury debug=True`` will install mercury built with debug
-flags. The names of particular variants available for a package depend on
+Variants are named options associated with a particular package and are
+typically used to enable or disable certain features at build time. They
+are optional, as each package must provide default values for each variant
+it makes available.
+
+The names of variants available for a particular package depend on
 what was provided by the package author. ``spack info <package>`` will
 provide information on what build variants are available.

-For compatibility with earlier versions, variants which happen to be
-boolean in nature can be specified by a syntax that represents turning
-options on and off. For example, in the previous spec we could have
-supplied ``mercury +debug`` with the same effect of enabling the debug
-compile time option for the libelf package.
+There are different types of variants:

-Depending on the package a variant may have any default value. For
-``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
-with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
-you can turn it off by either adding ``-name`` or ``~name`` to the spec.
+1. Boolean variants. Typically used to enable or disable a feature at
+   compile time. For example, a package might have a ``debug`` variant that
+   can be explicitly enabled with ``+debug`` and disabled with ``~debug``.
+2. Single-valued variants. Often used to set defaults. For example, a package
+   might have a ``compression`` variant that determines the default
+   compression algorithm, which users could set to ``compression=gzip`` or
+   ``compression=zstd``.
+3. Multi-valued variants. A package might have a ``fabrics`` variant that
+   determines which network fabrics to support. Users could set this to
+   ``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
+   interfaces. The values are separated by commas.

-There are two syntaxes here because, depending on context, ``~`` and
-``-`` may mean different things. In most shells, the following will
-result in the shell performing home directory substitution:
+The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified
+fabrics, but other fabrics may be enabled as well. If the intent is to
+enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi``
+syntax should be used with the ``:=`` operator.

-.. code-block:: sh
+.. note::

-   mpileaks ~debug # shell may try to substitute this!
-   mpileaks~debug  # use this instead
+   In certain shells, the ``~`` character is expanded to the home
+   directory. To avoid these issues, avoid whitespace between the package
+   name and the variant:

-If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded. In this situation, you would want to write ``libelf
--debug``. However, ``-`` can be ambiguous when included after a
-package name without spaces:
+   .. code-block:: sh

-.. code-block:: sh
+      mpileaks ~debug # shell may try to substitute this!
+      mpileaks~debug  # use this instead

-   mpileaks-debug # wrong!
-   mpileaks -debug # right
+   Alternatively, you can use the ``-`` character to disable a variant,
+   but be aware that this requires a space between the package name and
+   the variant:

-Spack allows the ``-`` character to be part of package names, so the
-above will be interpreted as a request for the ``mpileaks-debug``
-package, not a request for ``mpileaks`` built without ``debug``
-options. In this scenario, you should write ``mpileaks~debug`` to
-avoid ambiguity.
+   .. code-block:: sh

-When spack normalizes specs, it prints them out with no spaces boolean
-variants using the backwards compatibility syntax and uses only ``~``
-for disabled boolean variants. The ``-`` and spaces on the command
-line are provided for convenience and legibility.
+      mpileaks-debug  # wrong: refers to a package named "mpileaks-debug"
+      mpileaks -debug # right: refers to a package named mpileaks with debug disabled
+
+   As a last resort, ``debug=False`` can also be used to disable a boolean variant.


 """""""""""""""""""""""""""""""""""
 Variant propagation to dependencies
 """""""""""""""""""""""""""""""""""

 Spack allows variants to propagate their value to the package's
 dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
```
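For a quick side-by-side of the spec syntax described in this hunk (the ``example`` package and its variants are hypothetical):

```console
$ spack install example +debug              # boolean variant enabled
$ spack install example ~debug              # boolean variant disabled
$ spack install example compression=zstd    # single-valued variant
$ spack install example fabrics=verbs,ofi   # multi-valued: at least these values
$ spack install example fabrics:=verbs,ofi  # multi-valued: exactly these values
$ spack install example ++debug             # propagate a boolean variant to dependencies
```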
```diff
@@ -1409,27 +1415,29 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       spec: gcc@4.9.3
-       paths:
-         cc: /opt/gcc/bin/gcc
-         c++: /opt/gcc/bin/g++
-         f77: /opt/gcc/bin/gfortran
-         fc: /opt/gcc/bin/gfortran
-       environment:
-         unset:
-         - BAD_VARIABLE
-         set:
-           GOOD_VARIABLE_NUM: 1
-           GOOD_VARIABLE_STR: good
-         prepend_path:
-           PATH: /path/to/binutils
-         append_path:
-           LD_LIBRARY_PATH: /opt/gcc/lib
-       extra_rpaths:
-       - /path/to/some/compiler/runtime/directory
-       - /path/to/some/other/compiler/runtime/directory
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@4.9.3
+         prefix: /opt/gcc
+         extra_attributes:
+           compilers:
+             c: /opt/gcc/bin/gcc
+             cxx: /opt/gcc/bin/g++
+             fortran: /opt/gcc/bin/gfortran
+           environment:
+             unset:
+             - BAD_VARIABLE
+             set:
+               GOOD_VARIABLE_NUM: 1
+               GOOD_VARIABLE_STR: good
+             prepend_path:
+               PATH: /path/to/binutils
+             append_path:
+               LD_LIBRARY_PATH: /opt/gcc/lib
+           extra_rpaths:
+           - /path/to/some/compiler/runtime/directory
+           - /path/to/some/other/compiler/runtime/directory


 ^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -1908,7 +1916,7 @@ diagnostics. Issues, if found, are reported to stdout:

    PKG-DIRECTIVES: 1 issue found
    1. lammps: wrong variant in "conflicts" directive
       the variant 'adios' does not exist
-      in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
+      in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py


 ------------
```
```diff
@@ -45,10 +45,14 @@ provided binary cache, which can be a local directory or a remote URL.
 Here is an example where a build cache is created in a local directory named
 "spack-cache", to which we push the "ninja" spec:

-   ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut
-
 .. code-block:: console

    $ spack buildcache push ./spack-cache ninja
-   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
+   ==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache
+   ...
+   ==> [30/30] Pushed ninja@1.12.1/ngldn2k

 Note that ``ninja`` must be installed locally for this to work.

@@ -98,9 +102,10 @@ Now you can use list:
 .. code-block:: console

    $ spack buildcache list
-   ==> 1 cached build.
-   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
-   ninja@1.10.2
+   ==> 24 cached builds.
+   -- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ----------------
+   [ ... ]
+   ninja@1.12.1

 With ``mymirror`` configured and an index available, Spack will automatically
 use it during concretization and installation. That means that you can expect
@@ -111,17 +116,17 @@ verify by re-installing ninja:

    $ spack uninstall ninja
    $ spack install ninja
-   ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
-   gpg: Signature made Do 12 Jan 2023 16:01:04 CET
-   gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
+   ==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24]
+   gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST
+   gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07
+   gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
    [ ... ]
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
-   ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
-   ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
-   [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
+   ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0
+   ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a
+   ==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache
+   ==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh
+   Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s
+   [+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh

 It worked! You've just completed a full example of creating a build cache with
 a spec of interest, adding it as a mirror, updating its index, listing the contents,
@@ -344,19 +349,18 @@ which lets you get started quickly. See the following resources for more informa
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^

 Create tarball of installed Spack package and all dependencies.
-Tarballs are checksummed and signed if gpg2 is available.
-Places them in a directory ``build_cache`` that can be copied to a mirror.
-Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches.
+Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available.
+Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches.

 ============== ========================================================================================================================
 Arguments      Description
 ============== ========================================================================================================================
 ``<specs>``    list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
-``-d <path>``  directory in which ``build_cache`` directory is created, defaults to ``.``
-``-f``         overwrite ``.spack`` file in ``build_cache`` directory if it exists
+``-d <path>``  directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.``
+``-f``         overwrite compressed tarball and spec metadata files if they already exist
 ``-k <key>``   the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
 ``-r``         make paths in binaries relative before creating tarball
-``-y``         answer yes to all create unsigned ``build_cache`` questions
+``-y``         answer yes to all questions about creating unsigned build caches
 ============== ========================================================================================================================

@@ -397,6 +401,165 @@ List public keys available on Spack mirror.
 ========= ==============================================
 Arguments Description
 ========= ==============================================
-``-i``    trust the keys downloaded with prompt for each
+``-it``   trust the keys downloaded with prompt for each
 ``-y``    answer yes to all trust all keys downloaded
 ========= ==============================================
```

The remainder of this addition is the new "Build Cache Layout" section, shown below.
.. _build_cache_layout:

------------------
Build Cache Layout
------------------

This section describes the structure and content of URL-style build caches, as
distinguished from OCI-style build caches.

The entry point for a binary package is a manifest json file that points to at
least two other files stored as content-addressed blobs. These files include a spec
metadata file, as well as the installation directory of the package stored as
a compressed archive file. Binary package manifest files are named to indicate
the package name and version, as well as the hash of the concrete spec. For
example::

    gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json

would contain the manifest for a binary package of ``gcc-runtime@12.3.0``.
The id of the built package is defined to be the DAG hash of the concrete spec,
and exists in the name of the file as well. The id distinguishes a particular
binary package from all other binary packages with the same package name and
version. Below is an example binary package manifest file. Such a file would
live in the versioned spec manifests directory of a binary mirror, for example
``v3/manifests/spec/``::

    {
      "version": 3,
      "data": [
        {
          "contentLength": 10731083,
          "mediaType": "application/vnd.spack.install.v2.tar+gzip",
          "compression": "gzip",
          "checksumAlgorithm": "sha256",
          "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
        },
        {
          "contentLength": 1000,
          "mediaType": "application/vnd.spack.spec.v5+json",
          "compression": "gzip",
          "checksumAlgorithm": "sha256",
          "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
        }
      ]
    }

The manifest points to both the compressed tar file as well as the compressed
spec metadata file, and contains the checksum of each. This checksum
is also used as the address of the associated file, and hence must be
known in order to locate the tarball or spec file within the mirror. Once the
tarball or spec metadata file is downloaded, the checksum should be computed locally
and compared to the checksum in the manifest to ensure the contents have not changed
since the binary package was pushed. Spack stores all data files (including compressed
tar files, spec metadata, indices, public keys, etc.) within a ``blobs/<hash-algorithm>/``
directory, using the first two characters of the checksum as a sub-directory
to reduce the number of files in a single folder. Here is a depiction of the
organization of binary mirror contents::

    mirror_directory/
      v3/
        layout.json
        manifests/
          spec/
            gcc-runtime/
              gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json
            gmake/
              gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json
            compiler-wrapper/
              compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json
          index/
            index.manifest.json
          key/
            75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json
            keys.manifest.json
      blobs/
        sha256/
          0f/
            0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210
          fb/
            fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041
          2a/
            2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f
          ...

Files within the ``manifests`` directory are organized into subdirectories by
the type of entity they represent. Binary package manifests live in the ``spec/``
directory, binary cache index manifests live in the ``index/`` directory, and
manifests for public keys and their indices live in the ``key/`` subdirectory.
Regardless of the type of entity they represent, all manifest files are named
with an extension ``.manifest.json``.

Every manifest contains a ``data`` array, each element of which refers to an
associated file stored as a content-addressed blob. Considering the example spec
manifest shown above, the compressed installation archive can be found by
picking out the data blob with the appropriate ``mediaType``, which in this
case would be ``application/vnd.spack.install.v2.tar+gzip``. The associated
file is found by looking in the blobs directory under ``blobs/sha256/0f/`` for
the file named with the complete checksum value.
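The addressing rule just described (checksum algorithm directory, then the first two characters of the checksum, then the full checksum as the file name) is simple enough to exercise directly. Below is a minimal verification sketch; it is not part of Spack, and the function name and paths are illustrative:

```python
import hashlib
import json
import pathlib


def verify_manifest(mirror_root: str, manifest_path: str) -> None:
    """Locate and checksum every blob referenced by a manifest."""
    mirror = pathlib.Path(mirror_root)
    manifest = json.loads(pathlib.Path(manifest_path).read_text())

    for entry in manifest["data"]:
        algo = entry["checksumAlgorithm"]  # e.g. "sha256"
        checksum = entry["checksum"]
        # Blobs live at blobs/<algorithm>/<first two hex chars>/<checksum>.
        blob = mirror / "blobs" / algo / checksum[:2] / checksum
        digest = hashlib.new(algo, blob.read_bytes()).hexdigest()
        if digest != checksum:
            raise ValueError(f"checksum mismatch for {blob}")
        print(f"OK {entry['mediaType']} -> {blob}")
```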
As mentioned above, every entity in a binary mirror (aka build cache) is stored
as a content-addressed blob pointed to by a manifest. While an example spec
manifest (i.e. a manifest for a binary package) is shown above, here is what
the manifest of a build cache index looks like::

    {
      "version": 3,
      "data": [
        {
          "contentLength": 6411,
          "mediaType": "application/vnd.spack.db.v8+json",
          "compression": "none",
          "checksumAlgorithm": "sha256",
          "checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234"
        }
      ]
    }

Some things to note about this manifest are that it points to a blob that is not
compressed (``compression: "none"``), and that the ``mediaType`` is one we have
not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress
build cache indices stems from the fact that spack does not yet sign build cache
index manifests. Once that changes, you may start to see these indices stored as
compressed blobs.

For completeness, here are examples of manifests for the other two types of entities
you might find in a spack build cache. First, a public key manifest::

    {
      "version": 3,
      "data": [
        {
          "contentLength": 2472,
          "mediaType": "application/pgp-keys",
          "compression": "none",
          "checksumAlgorithm": "sha256",
          "checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7"
        }
      ]
    }

Note the ``mediaType`` of ``application/pgp-keys``. Finally, a public key index manifest::

    {
      "version": 3,
      "data": [
        {
          "contentLength": 56,
          "mediaType": "application/vnd.spack.keyindex.v1+json",
          "compression": "none",
          "checksumAlgorithm": "sha256",
          "checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c"
        }
      ]
    }

Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note
that both of the above manifests refer to uncompressed blobs; this is for the same
reason that spack does not yet compress build cache index blobs.
```diff
@@ -63,7 +63,6 @@ on these ideas for each distinct build system that Spack supports:
    build_systems/cudapackage
    build_systems/custompackage
    build_systems/inteloneapipackage
-   build_systems/intelpackage
    build_systems/rocmpackage
    build_systems/sourceforgepackage

@@ -84,7 +83,7 @@ packages. You can quickly find examples by running:

 .. code-block:: console

-   $ cd var/spack/repos/builtin/packages
+   $ cd var/spack/repos/spack_repo/builtin/packages
    $ grep -l QMakePackage */package.py
```
```diff
@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:

 Spack has a number of built-in bundle packages, such as:

-* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
-* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
-* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
-* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_

 where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
 ``Libc`` is a virtual bundle package for the C standard library.
```
```diff
@@ -199,7 +199,7 @@ a variant to control this:
 However, not every CMake package accepts all four of these options.
 Grep the ``CMakeLists.txt`` file to see if the default values are
 missing or replaced. For example, the
-`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
+`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
 package overrides the default variant with:

 .. code-block:: python
```
```diff
@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
 focuses mostly on how Spack's build systems work.

 In this guide, we will be using the
-`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
-`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
+`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
+`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
 packages as examples. ``perl``'s build system is a hand-written
 ``Configure`` shell script, while ``cmake`` bootstraps itself during
 installation. Both of these packages require custom build systems.
```
```diff
@@ -33,9 +33,6 @@ For more information on a specific package, do::

    spack info --all <package-name>

-Intel no longer releases new versions of Parallel Studio, which can be
-used in Spack via the :ref:`intelpackage`. All of its components can
-now be found in oneAPI.
-
 Examples
 ========
@@ -50,34 +47,8 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

-Add the compilers to your ``compilers.yaml`` so spack can use them::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-
-Verify that the compilers are available::
-
-   spack compiler list
-
-Note that 2024 and later releases do not include ``icc``. Before 2024,
-the package layout was different::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
-
-The ``intel-oneapi-compilers`` package includes 2 families of
-compilers:
-
-* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
-  compilers. 2024 and later releases contain ``ifort``, but not
-  ``icc`` and ``icpc``.
-* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
-  compilers based on LLVM.
-
-To build the ``patchelf`` Spack package with ``icc``, do::
-
-   spack install patchelf%intel
-
-To build with with ``icx``, do ::
+To build the ``patchelf`` Spack package with ``icx``, do::

    spack install patchelf%oneapi
@@ -92,15 +63,6 @@ Install the oneAPI compilers::

    spack install intel-oneapi-compilers

-Add the compilers to your ``compilers.yaml`` so Spack can use them::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
-
-Verify that the compilers are available::
-
-   spack compiler list
-
 Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::

    git clone https://github.com/spack/spack-configs
@@ -149,7 +111,7 @@ Compilers
 ---------

 To use the compilers, add some information about the installation to
-``compilers.yaml``. For most users, it is sufficient to do::
+``packages.yaml``. For most users, it is sufficient to do::

    spack compiler add /opt/intel/oneapi/compiler/latest/bin
@@ -157,7 +119,7 @@ Adapt the paths above if you did not install the tools in the default
 location. After adding the compilers, using them is the same
 as if you had installed the ``intel-oneapi-compilers`` package.
 Another option is to manually add the configuration to
-``compilers.yaml`` as described in :ref:`Compiler configuration
+``packages.yaml`` as described in :ref:`Compiler configuration
 <compiler-config>`.

 Before 2024, the directory structure was different::
@@ -200,15 +162,5 @@ You can also use Spack-installed libraries. For example::

 Will update your environment CPATH, LIBRARY_PATH, and other
 environment variables for building an application with oneMKL.

-More information
-================
-
-This section describes basic use of oneAPI, especially if it has
-changed compared to Parallel Studio. See :ref:`intelpackage` for more
-information on :ref:`intel-virtual-packages`,
-:ref:`intel-unrelated-packages`,
-:ref:`intel-integrating-external-libraries`, and
-:ref:`using-mkl-tips`.
-
 .. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
```
File diff suppressed because it is too large.
```diff
@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the

 .. code-block:: python

-   def setup_build_environment(self, env):
+   def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.set("PREFIX", prefix)
        env.set("BLASLIB", spec["blas"].libs.ld_flags)


-`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
+`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
 is a good example of a simple package that does this, while
-`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
+`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
 is a good example of a more complex package.
```
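As a rough illustration of the typed signature shown above, a hypothetical package might use it like this (package name, URL, and checksum are placeholders; assumes ``EnvironmentModifications`` is re-exported by ``spack.package``):

```python
from spack.package import *


class Mycode(MakefilePackage):
    """Hypothetical package whose Makefile reads PREFIX and BLASLIB."""

    homepage = "https://example.com/mycode"
    url = "https://example.com/mycode-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    depends_on("blas")

    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        # Variables consumed by this package's Makefile at build time.
        env.set("PREFIX", self.prefix)
        env.set("BLASLIB", self.spec["blas"].libs.ld_flags)
```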
```diff
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
        ]


-`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
+`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
 is a good example of a package that uses this strategy.

@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
        makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")


-`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
+`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
 is a good example of a package that involves editing a Makefile to set
 the appropriate variables.

@@ -192,7 +192,7 @@ well for storing variables:
            inc.write(f"{key} = {config[key]}\n")


-`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
+`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
 is a good example of a package that uses a dictionary to store
 configuration variables.

@@ -213,7 +213,7 @@ them in a list:
            inc.write(f"{var}\n")


-`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
+`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
 is a good example of a package that uses a list to store
 configuration variables.
```
```diff
@@ -12,8 +12,7 @@ The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
 it provides standard variants, dependencies, and conflicts to facilitate building
 packages using GPUs though for AMD in this case.

-You can find the source for this package (and suggestions for setting up your
-``compilers.yaml`` and ``packages.yaml`` files) at
+You can find the source for this package (and suggestions for setting up your ``packages.yaml`` file) at
 `<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/rocm.py>`__.

 ^^^^^^^^
```
@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
|
||||
If it isn't on CRAN, try Bioconductor, another common R repository.
|
||||
|
||||
For the purposes of this tutorial, we will be walking through
|
||||
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
|
||||
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
|
||||
as an example. If you search for "CRAN caret", you will quickly find what
|
||||
you are looking for at https://cran.r-project.org/package=caret.
|
||||
https://cran.r-project.org is the main CRAN website. However, CRAN also
|
||||
@@ -337,7 +337,7 @@ Non-R dependencies
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Some packages depend on non-R libraries for linking. Check out the
|
||||
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
|
||||
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
|
||||
package for an example: https://cloud.r-project.org/package=stringi.
|
||||
If you search for the text "SystemRequirements", you will see:
|
||||
|
||||
@@ -352,7 +352,7 @@ Passing arguments to the installation
|
||||
|
||||
Some R packages provide additional flags that can be passed to
|
||||
``R CMD INSTALL``, often to locate non-R dependencies.
|
||||
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
|
||||
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
|
||||
is an example of this, and flags for linking to an MPI library. To pass
|
||||
these to the installation command, you can override ``configure_args``
|
||||
like so:
@@ -104,10 +104,10 @@ Finding available options

 The first place to start when looking for a list of valid options to
 build a package is ``scons --help``. Some packages like
-`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
+`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
 don't bother overwriting the default SCons help message, so this isn't
 very useful, but other packages like
-`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
+`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
 print a list of valid command-line variables:

 .. code-block:: console

@@ -177,7 +177,7 @@ print a list of valid command-line variables:

 More advanced packages like
-`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
+`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
 use ``scons --help`` to print a list of subcommands:

 .. code-block:: console
@@ -225,8 +225,14 @@ def setup(sphinx):
     ("py:class", "llnl.util.lang.T"),
     ("py:class", "llnl.util.lang.KT"),
     ("py:class", "llnl.util.lang.VT"),
+    ("py:class", "llnl.util.lang.K"),
+    ("py:class", "llnl.util.lang.V"),
+    ("py:class", "llnl.util.lang.ClassPropertyType"),
     ("py:obj", "llnl.util.lang.KT"),
     ("py:obj", "llnl.util.lang.VT"),
+    ("py:obj", "llnl.util.lang.ClassPropertyType"),
+    ("py:obj", "llnl.util.lang.K"),
+    ("py:obj", "llnl.util.lang.V"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -148,15 +148,16 @@ this can expose you to attacks. Use at your own risk.
 ``ssl_certs``
 --------------------

 Path to custom certificates for SSL verification. The value can be a
 filesystem path, or an environment variable that expands to an absolute file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
 When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
 a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
-in the subprocess calling ``curl``.
-If ``url_fetch_method:urllib`` then files and directories are supported, i.e.
+in the subprocess calling ``curl``. If additional ``curl`` arguments are required,
+they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``.
+If ``url_fetch_method:urllib`` then files and directories are supported, i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
 In all cases the expanded path must be absolute for Spack to use the certificates.
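For orientation, here is a minimal sketch of how these settings fit together in a user-scope ``config.yaml``; the certificate path is a placeholder, not a real default:

.. code-block:: yaml

   # ~/.spack/config.yaml -- illustrative values only
   config:
     url_fetch_method: curl
     # With curl, this must expand to a single absolute file;
     # Spack exports it as CURL_CA_BUNDLE for the curl subprocess.
     ssl_certs: /etc/ssl/certs/internal-ca-bundle.pem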
@@ -11,7 +11,7 @@ Configuration Files
 Spack has many configuration files. Here is a quick list of them, in
 case you want to skip directly to specific docs:

-* :ref:`compilers.yaml <compiler-config>`
+* :ref:`packages.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
 * :ref:`include.yaml <include-yaml>`
@@ -46,6 +46,12 @@ Each Spack configuration file is nested under a top-level section
 corresponding to its name. So, ``config.yaml`` starts with ``config:``,
 ``mirrors.yaml`` starts with ``mirrors:``, etc.

+.. tip::
+
+   Validation and autocompletion of Spack config files can be enabled in
+   your editor with the YAML language server. See `spack/schemas
+   <https://github.com/spack/schemas>`_ for more information.
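As a sketch of what that editor setup can look like: yaml-language-server supports an inline modeline on the first line of a file. The exact schema URL below is an assumption; check the spack/schemas repository for the published schema paths.

.. code-block:: yaml

   # yaml-language-server: $schema=https://raw.githubusercontent.com/spack/schemas/main/config.json
   config:
     build_jobs: 8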
 .. _configuration-scopes:

 --------------------

@@ -95,7 +101,7 @@ are six configuration scopes. From lowest to highest:
    precedence over all other scopes.

 Each configuration directory may contain several configuration files,
-such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
+such as ``config.yaml``, ``packages.yaml``, or ``mirrors.yaml``. When
 configurations conflict, settings from higher-precedence scopes override
 lower-precedence settings.
@@ -226,9 +226,9 @@ If all is well, you'll see something like this:

    Modified files:

-     var/spack/repos/builtin/packages/hdf5/package.py
-     var/spack/repos/builtin/packages/hdf/package.py
-     var/spack/repos/builtin/packages/netcdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
    =======================================================
    Flake8 checks were clean.

@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:

 .. code-block:: console

-   var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
-   var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
-   var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
    Flake8 found errors.

 Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.

 .. warning::

-   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
+   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
    If you're using a ``python`` from Spack and you installed
    ``py-sphinx`` and friends, you need to make them available to your
    ``python``. The easiest way to do this is to run:
@@ -154,9 +154,7 @@ Package-related modules

 :mod:`spack.util.naming`
    Contains functions for mapping between Spack package names,
-   Python module names, and Python class names. Functions like
-   :func:`~spack.util.naming.mod_to_class` handle mapping package
-   module names to class names.
+   Python module names, and Python class names.

 :mod:`spack.directives`
    *Directives* are functions that can be called inside a package definition
lib/spack/docs/env_vars_yaml.rst (new file, 34 lines)

@@ -0,0 +1,34 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _env-vars-yaml:
+
+=============================================
+Environment Variable Settings (env_vars.yaml)
+=============================================
+
+Spack allows you to include shell environment variable modifications
+for a Spack environment by including an ``env_vars.yaml``. Environment
+variables can be modified by setting, unsetting, appending, and prepending
+variables in the shell environment.
+The changes to the shell environment will take effect when the Spack
+environment is activated.
+
+For example:
+
+.. code-block:: yaml
+
+   env_vars:
+     set:
+       ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+     unset:
+       ENVAR_TO_UNSET_IN_ENV_LOAD:
+     prepend_path:
+       PATH_LIST: "path/to/prepend"
+     append_path:
+       PATH_LIST: "path/to/append"
+     remove_path:
+       PATH_LIST: "path/to/remove"
@@ -539,7 +539,9 @@ from the command line.

 You can also include an environment directly in the ``spack.yaml`` file. It
 involves adding the ``include_concrete`` heading in the yaml followed by the
-absolute path to the independent environments.
+absolute path to the independent environments. Note that you may use Spack
+config variables such as ``$spack`` or environment variables as long as the
+expression expands to an absolute path.

 .. code-block:: yaml

@@ -549,7 +551,7 @@ absolute path to the independent environments.
      unify: true
    include_concrete:
    - /absolute/path/to/environment1
-   - /absolute/path/to/environment2
+   - $spack/../path/to/environment2

 Once the ``spack.yaml`` has been updated you must concretize the environment to
@@ -667,11 +669,11 @@ a ``packages.yaml`` file) could contain:
    # ...
    packages:
      all:
-       compiler: [intel]
        providers:
          mpi: [openmpi]
    # ...

-This configuration sets the default compiler for all packages to
-``intel``.
+This configuration sets the default mpi provider to be openmpi.
 ^^^^^^^^^^^^^^^^^^^^^^^
 Included configurations

@@ -686,7 +688,8 @@ the environment.
    spack:
      include:
      - environment/relative/path/to/config.yaml
-     - https://github.com/path/to/raw/config/compilers.yaml
+     - path: https://github.com/path/to/raw/config/compilers.yaml
+       sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
      - /absolute/path/to/packages.yaml
      - path: /path/to/$os/$target/environment
        optional: true

@@ -700,11 +703,11 @@ with the ``optional`` clause and conditional with the ``when`` clause. (See

 Files are listed using paths to individual files or directories containing them.
 Path entries may be absolute or relative to the environment or specified as
-URLs. URLs to individual files need link to the **raw** form of the file's
+URLs. URLs to individual files must link to the **raw** form of the file's
 contents (e.g., `GitHub
 <https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
 or `GitLab
-<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file.
 Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
 supported. Spack-specific, environment and user path variables can be used.
 (See :ref:`config-file-variables` for more information.)
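Since the text above mentions the ``optional`` and ``when`` clauses, a small sketch of both together may help; the paths and the condition expression are illustrative, not defaults:

.. code-block:: yaml

   spack:
     include:
     # Skipped silently if the path does not exist
     - path: /cluster/shared/spack/packages.yaml
       optional: true
     # Only included when the condition evaluates to true
     - path: /cluster/shared/spack/rhel8-settings.yaml
       when: os == "rhel8"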
@@ -999,6 +1002,28 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

+-------------------------------
+Modifying Environment Variables
+-------------------------------
+
+Spack Environments can modify the active shell's environment variables when activated. The environment can be
+configured to set, unset, prepend, or append values using the ``env_vars`` configuration in ``spack.yaml`` or through
+an included config scope file:
+
+.. code-block:: yaml
+
+   spack:
+     env_vars:
+       set:
+         ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
+       unset:
+         ENVAR_TO_UNSET_IN_ENV_LOAD:
+       prepend_path:
+         PATH_LIST: "path/to/prepend"
+       append_path:
+         PATH_LIST: "path/to/append"
+       remove_path:
+         PATH_LIST: "path/to/remove"
+
 -----------------
 Environment Views
@@ -1,161 +0,0 @@
-spack:
-  definitions:
-  - compiler-pkgs:
-    - 'llvm+clang@6.0.1 os=centos7'
-    - 'gcc@6.5.0 os=centos7'
-    - 'llvm+clang@6.0.1 os=ubuntu18.04'
-    - 'gcc@6.5.0 os=ubuntu18.04'
-  - pkgs:
-    - readline@7.0
-    # - xsdk@0.4.0
-  - compilers:
-    - '%gcc@5.5.0'
-    - '%gcc@6.5.0'
-    - '%gcc@7.3.0'
-    - '%clang@6.0.0'
-    - '%clang@6.0.1'
-  - oses:
-    - os=ubuntu18.04
-    - os=centos7
-
-  specs:
-  - matrix:
-    - [$pkgs]
-    - [$compilers]
-    - [$oses]
-    exclude:
-    - '%gcc@7.3.0 os=centos7'
-    - '%gcc@5.5.0 os=ubuntu18.04'
-
-  mirrors:
-    cloud_gitlab: https://mirror.spack.io
-
-  compilers:
-  # The .gitlab-ci.yml for this project picks a Docker container which does
-  # not have any compilers pre-built and ready to use, so we need to fake the
-  # existence of those here.
-  - compiler:
-      operating_system: centos7
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: gcc@5.5.0
-      target: x86_64
-  - compiler:
-      operating_system: centos7
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: gcc@6.5.0
-      target: x86_64
-  - compiler:
-      operating_system: centos7
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: clang@6.0.0
-      target: x86_64
-  - compiler:
-      operating_system: centos7
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: clang@6.0.1
-      target: x86_64
-
-  - compiler:
-      operating_system: ubuntu18.04
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: clang@6.0.0
-      target: x86_64
-  - compiler:
-      operating_system: ubuntu18.04
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: clang@6.0.1
-      target: x86_64
-  - compiler:
-      operating_system: ubuntu18.04
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: gcc@6.5.0
-      target: x86_64
-  - compiler:
-      operating_system: ubuntu18.04
-      modules: []
-      paths:
-        cc: /not/used
-        cxx: /not/used
-        f77: /not/used
-        fc: /not/used
-      spec: gcc@7.3.0
-      target: x86_64
-
-  gitlab-ci:
-    bootstrap:
-    - name: compiler-pkgs
-      compiler-agnostic: true
-    mappings:
-    - # spack-cloud-ubuntu
-      match:
-      # these are specs, if *any* match the spec under consideration, this
-      # 'mapping' will be used to generate the CI job
-      - os=ubuntu18.04
-      runner-attributes:
-        # 'tags' and 'image' go directly onto the job, 'variables' will
-        # be added to what we already necessarily create for the job as
-        # a part of the CI workflow
-        tags:
-        - spack-k8s
-        image:
-          name: scottwittenburg/spack_builder_ubuntu_18.04
-          entrypoint: [""]
-    - # spack-cloud-centos
-      match:
-      # these are specs, if *any* match the spec under consideration, this
-      # 'mapping' will be used to generate the CI job
-      - 'os=centos7'
-      runner-attributes:
-        tags:
-        - spack-k8s
-        image:
-          name: scottwittenburg/spack_builder_centos_7
-          entrypoint: [""]
-
-  cdash:
-    build-group: Release Testing
-    url: http://cdash
-    project: Spack Testing
-    site: Spack Docker-Compose Workflow
-
-  repos: []
-  upstreams: {}
-  modules:
-    enable: []
-  packages: {}
-  config: {}
@@ -131,7 +131,7 @@ creates a simple python file:
 It doesn't take much python coding to get from there to a working
 package:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
    :lines: 5-

 Spack also provides wrapper functions around common commands like
@@ -254,12 +254,11 @@ directory.
 Compiler configuration
 ----------------------

-Spack has the ability to build packages with multiple compilers and
-compiler versions. Compilers can be made available to Spack by
-specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
-or automatically by running ``spack compiler find``, but for
-convenience Spack will automatically detect compilers the first time
-it needs them.
+Spack has the ability to build packages with multiple compilers and compiler versions.
+Compilers can be made available to Spack by specifying them manually in ``packages.yaml``,
+or automatically by running ``spack compiler find``.
+For convenience, Spack will automatically detect compilers the first time it needs them,
+if none is available.
 .. _cmd-spack-compilers:

@@ -274,16 +273,11 @@ compilers`` or ``spack compiler list``:

    $ spack compilers
    ==> Available compilers
-   -- gcc ---------------------------------------------------------
-   gcc@4.9.0  gcc@4.8.0  gcc@4.7.0  gcc@4.6.2  gcc@4.4.7
-   gcc@4.8.2  gcc@4.7.1  gcc@4.6.3  gcc@4.6.1  gcc@4.1.2
-   -- intel -------------------------------------------------------
-   intel@15.0.0  intel@14.0.0  intel@13.0.0  intel@12.1.0  intel@10.0
-   intel@14.0.3  intel@13.1.1  intel@12.1.5  intel@12.0.4  intel@9.1
-   intel@14.0.2  intel@13.1.0  intel@12.1.3  intel@11.1
-   intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
-   -- clang -------------------------------------------------------
-   clang@3.4  clang@3.3  clang@3.2  clang@3.1
+   -- gcc ubuntu20.04-x86_64 ---------------------------------------
+   gcc@9.4.0  gcc@8.4.0  gcc@10.5.0
+
+   -- llvm ubuntu20.04-x86_64 --------------------------------------
+   llvm@12.0.0  llvm@11.0.0  llvm@10.0.0

 Any of these compilers can be used to build Spack packages. More on
 how this is done is in :ref:`sec-specs`.
@@ -302,16 +296,22 @@ An alias for ``spack compiler find``.
 ``spack compiler find``
 ^^^^^^^^^^^^^^^^^^^^^^^

-Lists the compilers currently available to Spack. If you do not see
-a compiler in this list, but you want to use it with Spack, you can
-simply run ``spack compiler find`` with the path to where the
-compiler is installed. For example:
+If you do not see a compiler in the list shown by:
+
+.. code-block:: console
+
+   $ spack compiler list
+
+but you want to use it with Spack, you can simply run ``spack compiler find`` with the
+path to where the compiler is installed. For example:

 .. code-block:: console

-   $ spack compiler find /usr/local/tools/ic-13.0.079
-   ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
-   intel@13.0.079
+   $ spack compiler find /opt/intel/oneapi/compiler/2025.1/bin/
+   ==> Added 1 new compiler to /home/user/.spack/packages.yaml
+   intel-oneapi-compilers@2025.1.0
+   ==> Compilers are defined in the following files:
+   /home/user/.spack/packages.yaml

 Or you can run ``spack compiler find`` with no arguments to force
 auto-detection. This is useful if you do not know where compilers are
@@ -322,7 +322,7 @@ installed, but you know that new compilers have been added to your

    $ module load gcc/4.9.0
    $ spack compiler find
-   ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
+   ==> Added 1 new compiler to /home/user/.spack/packages.yaml
    gcc@4.9.0

 This loads the environment module for gcc-4.9.0 to add it to

@@ -331,7 +331,7 @@ This loads the environment module for gcc-4.9.0 to add it to
 .. note::

    By default, spack does not fill in the ``modules:`` field in the
-   ``compilers.yaml`` file. If you are using a compiler from a
+   ``packages.yaml`` file. If you are using a compiler from a
    module, then you should add this field manually.
    See the section on :ref:`compilers-requiring-modules`.
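As a sketch of what filling in that field by hand can look like (the compiler version, module name, and paths here are illustrative):

.. code-block:: yaml

   packages:
     gcc:
       externals:
       - spec: gcc@12.3.0 languages='c,c++,fortran'
         prefix: /sw/gcc/12.3.0
         # Loaded whenever Spack runs this compiler
         modules: [gcc/12.3.0]
         extra_attributes:
           compilers:
             c: /sw/gcc/12.3.0/bin/gcc
             cxx: /sw/gcc/12.3.0/bin/g++
             fortran: /sw/gcc/12.3.0/bin/gfortran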
@@ -341,91 +341,82 @@ This loads the environment module for gcc-4.9.0 to add it to
 ``spack compiler info``
 ^^^^^^^^^^^^^^^^^^^^^^^

-If you want to see specifics on a particular compiler, you can run
-``spack compiler info`` on it:
+If you want to see additional information on some specific compilers, you can run ``spack compiler info`` on it:

 .. code-block:: console

-   $ spack compiler info intel@15
-   intel@15.0.0:
-     paths:
-       cc  = /usr/local/bin/icc-15.0.090
-       cxx = /usr/local/bin/icpc-15.0.090
-       f77 = /usr/local/bin/ifort-15.0.090
-       fc  = /usr/local/bin/ifort-15.0.090
-     modules  = []
-     operating_system  = centos6
-   ...
+   $ spack compiler info gcc
+   gcc@=8.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc-8
+       cxx: /usr/bin/g++-8
+       fortran: /usr/bin/gfortran-8
+
+   gcc@=9.4.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc
+       cxx: /usr/bin/g++
+       fortran: /usr/bin/gfortran
+
+   gcc@=10.5.0 languages='c,c++,fortran' arch=linux-ubuntu20.04-x86_64:
+     prefix: /usr
+     compilers:
+       c: /usr/bin/gcc-10
+       cxx: /usr/bin/g++-10
+       fortran: /usr/bin/gfortran-10

-This shows which C, C++, and Fortran compilers were detected by Spack.
-Notice also that we didn't have to be too specific about the
-version. We just said ``intel@15``, and information about the only
-matching Intel compiler was displayed.
+This shows the details of the compilers that were detected by Spack.
+Notice also that we didn't have to be too specific about the version. We just said ``gcc``, and we got information
+about all the matching compilers.
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Manual compiler configuration
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-If auto-detection fails, you can manually configure a compiler by
-editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
-``spack config edit compilers``, which will open the file in
+If auto-detection fails, you can manually configure a compiler by editing your ``~/.spack/packages.yaml`` file.
+You can do this by running ``spack config edit packages``, which will open the file in
 :ref:`your favorite editor <controlling-the-editor>`.

-Each compiler configuration in the file looks like this:
+Each compiler has an "external" entry in the file with some ``extra_attributes``:

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /usr/local/bin/icc-15.0.024-beta
-         cxx: /usr/local/bin/icpc-15.0.024-beta
-         f77: /usr/local/bin/ifort-15.0.024-beta
-         fc: /usr/local/bin/ifort-15.0.024-beta
-       spec: intel@15.0.0
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /usr
+         extra_attributes:
+           compilers:
+             c: /usr/bin/gcc-10
+             cxx: /usr/bin/g++-10
+             fortran: /usr/bin/gfortran-10

-For compilers that do not support Fortran (like ``clang``), put
-``None`` for ``f77`` and ``fc``:
-
-.. code-block:: yaml
-
-   compilers:
-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /usr/bin/clang
-         cxx: /usr/bin/clang++
-         f77: None
-         fc: None
-       spec: clang@3.3svn
-
-Once you save the file, the configured compilers will show up in the
-list displayed by ``spack compilers``.
+The compiler executables are listed under ``extra_attributes:compilers``, and are keyed by language.
+Once you save the file, the configured compilers will show up in the list displayed by ``spack compilers``.

-You can also add compiler flags to manually configured compilers. These
-flags should be specified in the ``flags`` section of the compiler
-specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
+You can also add compiler flags to manually configured compilers. These flags should be specified in the
+``flags`` section of the compiler specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
 ``cppflags``, ``ldflags``, and ``ldlibs``. For example:

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /usr/bin/gcc
-         cxx: /usr/bin/g++
-         f77: /usr/bin/gfortran
-         fc: /usr/bin/gfortran
-       flags:
-         cflags: -O3 -fPIC
-         cxxflags: -O3 -fPIC
-         cppflags: -O3 -fPIC
-       spec: gcc@4.7.2
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /usr
+         extra_attributes:
+           compilers:
+             c: /usr/bin/gcc-10
+             cxx: /usr/bin/g++-10
+             fortran: /usr/bin/gfortran-10
+           flags:
+             cflags: -O3 -fPIC
+             cxxflags: -O3 -fPIC
+             cppflags: -O3 -fPIC

 These flags will be treated by spack as if they were entered from
 the command line each time this compiler is used. The compiler wrappers
@@ -440,95 +431,44 @@ These variables should be specified in the ``environment`` section of the compil
 specification. The operations available to modify the environment are ``set``, ``unset``,
 ``prepend_path``, ``append_path``, and ``remove_path``. For example:

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
-         cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
-         f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
-         fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
-       spec: oneapi@latest
-       environment:
-         set:
-           MKL_ROOT: "/path/to/mkl/root"
-         unset: # A list of environment variables to unset
-         - CC
-         prepend_path: # Similar for append|remove_path
-           LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
-
-.. note::
-
-   Spack is in the process of moving compilers from a separate
-   attribute to be handled like all other packages. As part of this
-   process, the ``compilers.yaml`` section will eventually be replaced
-   by configuration in the ``packages.yaml`` section. This new
-   configuration is now available, although it is not yet the default
-   behavior.
-
-Compilers can also be configured as external packages in the
-``packages.yaml`` config file. Any external package for a compiler
-(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
-assuming the paths to the compiler executables are determinable from
-the prefix.
-
-If the paths to the compiler executable are not determinable from the
-prefix, you can add them to the ``extra_attributes`` field. Similarly,
-all other fields from the compilers config can be added to the
-``extra_attributes`` field for an external representing a compiler.
-
-Note that the format for the ``paths`` field in the
-``extra_attributes`` section is different than in the ``compilers``
-config. For compilers configured as external packages, the section is
-named ``compilers`` and the dictionary maps language names (``c``,
-``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
-``fc``, and ``f77``.
-
-.. code-block:: yaml
-
-   packages:
-     gcc:
-       external:
-       - spec: gcc@12.2.0 arch=linux-rhel8-skylake
-         prefix: /usr
-         extra_attributes:
-           environment:
-             set:
-               GCC_ROOT: /usr
-       external:
-       - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
-         prefix: /usr
-         extra_attributes:
-           compilers:
-             c: /usr/bin/clang-with-suffix
-             cxx: /usr/bin/clang++-with-extra-info
-             fortran: /usr/bin/gfortran
-           extra_rpaths:
-           - /usr/lib/llvm/
+   packages:
+     intel-oneapi-compilers:
+       externals:
+       - spec: intel-oneapi-compilers@2025.1.0
+         prefix: /opt/intel/oneapi
+         extra_attributes:
+           compilers:
+             c: /opt/intel/oneapi/compiler/2025.1/bin/icx
+             cxx: /opt/intel/oneapi/compiler/2025.1/bin/icpx
+             fortran: /opt/intel/oneapi/compiler/2025.1/bin/ifx
+           environment:
+             set:
+               MKL_ROOT: "/path/to/mkl/root"
+             unset: # A list of environment variables to unset
+             - CC
+             prepend_path: # Similar for append|remove_path
+               LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
 ^^^^^^^^^^^^^^^^^^^^^^^
 Build Your Own Compiler
 ^^^^^^^^^^^^^^^^^^^^^^^

-If you are particular about which compiler/version you use, you might
-wish to have Spack build it for you. For example:
+If you are particular about which compiler/version you use, you might wish to have Spack build it for you.
+For example:

 .. code-block:: console

-   $ spack install gcc@4.9.3
+   $ spack install gcc@14+binutils

-Once that has finished, you will need to add it to your
-``compilers.yaml`` file. You can then set Spack to use it by default
-by adding the following to your ``packages.yaml`` file:
+Once the compiler is installed, you can start using it without additional configuration:

-.. code-block:: yaml
+.. code-block:: console

-   packages:
-     all:
-       compiler: [gcc@4.9.3]
+   $ spack install hdf5~mpi %gcc@14
+
+The same holds true for compilers that are made available from buildcaches, when reusing them is allowed.
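If you would rather not spell ``%gcc@14`` on every command line, one possible approach (a sketch using the ``require`` mechanism in ``packages.yaml``) is to require that compiler for all packages:

.. code-block:: yaml

   packages:
     all:
       require:
       - "%gcc@14"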
 .. _compilers-requiring-modules:

@@ -536,30 +476,26 @@ by adding the following to your ``packages.yaml`` file:
 Compilers Requiring Modules
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Many installed compilers will work regardless of the environment they
-are called with. However, some installed compilers require
-``$LD_LIBRARY_PATH`` or other environment variables to be set in order
-to run; this is typical for Intel and other proprietary compilers.
+Many installed compilers will work regardless of the environment they are called with.
+However, some installed compilers require environment variables to be set in order to run;
+this is typical for Intel and other proprietary compilers.

-In such a case, you should tell Spack which module(s) to load in order
-to run the chosen compiler (If the compiler does not come with a
-module file, you might consider making one by hand). Spack will load
-this module into the environment ONLY when the compiler is run, and
-NOT in general for a package's ``install()`` method. See, for
-example, this ``compilers.yaml`` file:
+On typical HPC clusters, these environment modifications are usually delegated to some "module" system.
+In such a case, you should tell Spack which module(s) to load in order to run the chosen compiler:

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       modules: [other/comp/gcc-5.3-sp3]
-       operating_system: SuSE11
-       paths:
-         cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
-         cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
-         f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
-         fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
-       spec: gcc@5.3.0
+   packages:
+     gcc:
+       externals:
+       - spec: gcc@10.5.0 languages='c,c++,fortran'
+         prefix: /opt/compilers
+         extra_attributes:
+           compilers:
+             c: /opt/compilers/bin/gcc-10
+             cxx: /opt/compilers/bin/g++-10
+             fortran: /opt/compilers/bin/gfortran-10
+         modules: [gcc/10.5.0]

 Some compilers require special environment settings to be loaded not just
 to run, but also to execute the code they build, breaking packages that
@@ -580,7 +516,7 @@ Licensed Compilers
 ^^^^^^^^^^^^^^^^^^

 Some proprietary compilers require licensing to use. If you need to
-use a licensed compiler (eg, PGI), the process is similar to a mix of
+use a licensed compiler, the process is similar to a mix of
 build your own, plus modules:

 #. Create a Spack package (if it doesn't exist already) to install

@@ -590,24 +526,21 @@ build your own, plus modules:
    using Spack to load the module it just created, and running simple
    builds (eg: ``cc helloWorld.c && ./a.out``)

-#. Add the newly-installed compiler to ``compilers.yaml`` as shown
-   above.
+#. Add the newly-installed compiler to ``packages.yaml`` as shown above.
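Putting the steps together, a sketch of such an entry; the compiler name, version, module, and paths are placeholders for whatever licensed compiler you install:

.. code-block:: yaml

   packages:
     nvhpc:
       externals:
       - spec: nvhpc@24.5
         prefix: /opt/nvidia/hpc_sdk/Linux_x86_64/24.5/compilers
         modules: [nvhpc/24.5]
         extra_attributes:
           compilers:
             c: /opt/nvidia/hpc_sdk/Linux_x86_64/24.5/compilers/bin/nvc
             cxx: /opt/nvidia/hpc_sdk/Linux_x86_64/24.5/compilers/bin/nvc++
             fortran: /opt/nvidia/hpc_sdk/Linux_x86_64/24.5/compilers/bin/nvfortran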
 .. _mixed-toolchains:

-^^^^^^^^^^^^^^^^
-Mixed Toolchains
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Fortran compilers on macOS
+^^^^^^^^^^^^^^^^^^^^^^^^^^

 Modern compilers typically come with related compilers for C, C++ and
 Fortran bundled together. When possible, results are best if the same
 compiler is used for all languages.

-In some cases, this is not possible. For example, starting with macOS El
-Capitan (10.11), many packages no longer build with GCC, but XCode
-provides no Fortran compilers. The user is therefore forced to use a
-mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
-Fortran.
+In some cases, this is not possible. For example, XCode on macOS provides no Fortran compilers.
+The user is therefore forced to use a mixed toolchain: XCode-provided Clang for C/C++ and e.g.
+GNU ``gfortran`` for Fortran.

 #. You need to make sure that Xcode is installed. Run the following command:
@@ -660,45 +593,25 @@ Fortran.

    Note: the flag is ``-license``, not ``--license``.

-#. Run ``spack compiler find`` to locate Clang.
-
 #. There are different ways to get ``gfortran`` on macOS. For example, you can
    install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
    gcc``), or from a `DMG installer
    <https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.

-#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
-   the path to ``gfortran``:
-
-   .. code-block:: yaml
-
-      compilers:
-      - compiler:
-          # ...
-          paths:
-            cc: /usr/bin/clang
-            cxx: /usr/bin/clang++
-            f77: /path/to/bin/gfortran
-            fc: /path/to/bin/gfortran
-          spec: apple-clang@11.0.0
-
-   If you used Spack to install GCC, you can get the installation prefix by
-   ``spack location -i gcc`` (this will only work if you have a single version
-   of GCC installed). Whereas for Homebrew, GCC is installed in
-   ``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
-   will be ``/usr/local/gfortran``.
+#. Run ``spack compiler find`` to locate both Apple-Clang and GCC.
+
+Since languages in Spack are modeled as virtual packages, ``apple-clang`` will be used to provide
+C and C++, while GCC will be used for Fortran.
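After detection, the two toolchains end up as separate external entries; roughly what ``spack compiler find`` records looks like the sketch below (versions and prefixes are illustrative):

.. code-block:: yaml

   packages:
     apple-clang:
       externals:
       - spec: apple-clang@15.0.0
         prefix: /usr
         extra_attributes:
           compilers:
             c: /usr/bin/clang
             cxx: /usr/bin/clang++
     gcc:
       externals:
       - spec: gcc@13.2.0 languages='fortran'
         prefix: /opt/homebrew
         extra_attributes:
           compilers:
             fortran: /opt/homebrew/bin/gfortran-13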
 ^^^^^^^^^^^^^^^^^^^^^
 Compiler Verification
 ^^^^^^^^^^^^^^^^^^^^^

-You can verify that your compilers are configured properly by installing a
-simple package. For example:
+You can verify that your compilers are configured properly by installing a simple package. For example:

 .. code-block:: console

-   $ spack install zlib%gcc@5.3.0
+   $ spack install zlib-ng%gcc@5.3.0

 .. _vendor-specific-compiler-configuration:
@@ -707,9 +620,7 @@ simple package. For example:
 Vendor-Specific Compiler Configuration
 --------------------------------------

-With Spack, things usually "just work" with GCC. Not so for other
-compilers. This section provides details on how to get specific
-compilers working.
+This section provides details on how to get vendor-specific compilers working.

 ^^^^^^^^^^^^^^^
 Intel Compilers

@@ -731,8 +642,8 @@ compilers:
    you have installed from the ``PATH`` environment variable.

    If you want use a version of ``gcc`` or ``g++`` other than the default
-   version on your system, you need to use either the ``-gcc-name``
-   or ``-gxx-name`` compiler option to specify the path to the version of
+   version on your system, you need to use either the ``--gcc-install-dir``
+   or ``--gcc-toolchain`` compiler option to specify the path to the version of
    ``gcc`` or ``g++`` that you want to use."

    -- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
@@ -740,76 +651,12 @@ compilers:
 Intel compilers may therefore be configured in one of two ways with
 Spack: using modules, or using compiler flags.

-""""""""""""""""""""""""""
-Configuration with Modules
-""""""""""""""""""""""""""
-
-One can control which GCC is seen by the Intel compiler with modules.
-A module must be loaded both for the Intel Compiler (so it will run)
-and GCC (so the compiler can find the intended GCC). The following
-configuration in ``compilers.yaml`` illustrates this technique:
-
-.. code-block:: yaml
-
-   compilers:
-   - compiler:
-       modules: [gcc-4.9.3, intel-15.0.24]
-       operating_system: centos7
-       paths:
-         cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
-         cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
-         f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-         fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-       spec: intel@15.0.24.4.9.3
-
-
-.. note::
-
-   The version number on the Intel compiler is a combination of
-   the "native" Intel version number and the GNU compiler it is
-   targeting.
-
-""""""""""""""""""""""""""
-Command Line Configuration
-""""""""""""""""""""""""""
-
-One can also control which GCC is seen by the Intel compiler by adding
-flags to the ``icc`` command:
-
-#. Identify the location of the compiler you just installed:
-
-   .. code-block:: console
-
-      $ spack location --install-dir gcc
-      ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
-
-#. Set up ``compilers.yaml``, for example:
-
-   .. code-block:: yaml
-
-      compilers:
-      - compiler:
-          modules: [intel-15.0.24]
-          operating_system: centos7
-          paths:
-            cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
-            cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
-            f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-            fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
-          flags:
-            cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
-            cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
-            fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
-          spec: intel@15.0.24.4.9.3
 ^^^
 NAG
 ^^^

-The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
-this requires you to set up a :ref:`mixed-toolchains`. It is recommended to use
-GCC for your C/C++ compilers.
+The Numerical Algorithms Group provides a licensed Fortran compiler.
+It is recommended to use GCC for your C/C++ compilers.

 The NAG Fortran compilers are a bit more strict than other compilers, and many
 packages will fail to install with error messages like:
@@ -826,44 +673,40 @@ the command line:

    $ spack install openmpi fflags="-mismatch"

-Or it can be set permanently in your ``compilers.yaml``:
+Or it can be set permanently in your ``packages.yaml``:

 .. code-block:: yaml

-   - compiler:
-       modules: []
-       operating_system: centos6
-       paths:
-         cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
-         cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
-         f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
-         fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
-       flags:
-         fflags: -mismatch
-       spec: nag@6.1
+   packages:
+     nag:
+       externals:
+       - spec: nag@6.1
+         prefix: /opt/nag/bin
+         extra_attributes:
+           compilers:
+             fortran: /opt/nag/bin/nagfor
+           flags:
+             fflags: -mismatch
 ---------------
 System Packages
 ---------------

-Once compilers are configured, one needs to determine which
-pre-installed system packages, if any, to use in builds. This is
-configured in the file ``~/.spack/packages.yaml``. For example, to use
-an OpenMPI installed in /opt/local, one would use:
+Once compilers are configured, one needs to determine which pre-installed system packages,
+if any, to use in builds. These are also configured in the ``~/.spack/packages.yaml`` file.
+For example, to use an OpenMPI installed in /opt/local, one would use:

 .. code-block:: yaml

-   packages:
-     openmpi:
-       externals:
-       - spec: openmpi@1.10.1
-         prefix: /opt/local
-       buildable: False
+   packages:
+     openmpi:
+       buildable: False
+       externals:
+       - spec: openmpi@1.10.1
+         prefix: /opt/local

-In general, Spack is easier to use and more reliable if it builds all of
-its own dependencies. However, there are several packages for which one
-commonly needs to use system versions:
+In general, *Spack is easier to use and more reliable if it builds all of its own dependencies*.
+However, there are several packages for which one commonly needs to use system versions:
 ^^^
 MPI

@@ -876,8 +719,7 @@ you are unlikely to get a working MPI from Spack. Instead, use an
 appropriate pre-installed MPI.

-If you choose a pre-installed MPI, you should consider using the
-pre-installed compiler used to build that MPI; see above on
-``compilers.yaml``.
+If you choose a pre-installed MPI, you should consider using the
+pre-installed compiler used to build that MPI.
 ^^^^^^^
 OpenSSL

@@ -1441,9 +1283,9 @@ To configure Spack, first run the following command inside the Spack console:

    spack compiler find

 This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
-containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
+containing a ``packages.yaml`` file. On a fresh Windows install with the above packages
 installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
-compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
+compiler will be integrated within the first version of MSVC present in the ``packages.yaml``
 output.

 Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
@@ -23,7 +23,6 @@ components for use by dependent packages:

    packages:
      all:
-       compiler: [rocmcc@=5.3.0]
        variants: amdgpu_target=gfx90a
      hip:
        buildable: false

@@ -70,16 +69,15 @@ This is in combination with the following compiler definition:

 .. code-block:: yaml

-   compilers:
-   - compiler:
-       spec: rocmcc@=5.3.0
-       paths:
-         cc: /opt/rocm-5.3.0/bin/amdclang
-         cxx: /opt/rocm-5.3.0/bin/amdclang++
-         f77: null
-         fc: /opt/rocm-5.3.0/bin/amdflang
-       operating_system: rhel8
-       target: x86_64
+   packages:
+     llvm-amdgpu:
+       externals:
+       - spec: llvm-amdgpu@=5.3.0
+         prefix: /opt/rocm-5.3.0
+         compilers:
+           c: /opt/rocm-5.3.0/bin/amdclang
+           cxx: /opt/rocm-5.3.0/bin/amdclang++
+           fortran: null

 This includes the following considerations:
@@ -43,6 +43,20 @@ or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protoco
 schemes) are supported. Spack-specific, environment and user path variables
 can be used. (See :ref:`config-file-variables` for more information.)

+A ``sha256`` is required for remote file URLs and must be specified as follows:
+
+.. code-block:: yaml
+
+   include:
+   - path: https://github.com/path/to/raw/config/compilers.yaml
+     sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
+
+Additionally, remote file URLs must link to the **raw** form of the file's
+contents (e.g., `GitHub
+<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
+or `GitLab
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
+
 .. warning::

    Recursive includes are not currently processed in a breadth-first manner
@@ -75,6 +75,7 @@ or refer to the full manual below.
    packages_yaml
    build_settings
    environments
+   env_vars_yaml
    containers
    mirrors
    module_file_support
@@ -128,7 +128,7 @@ depend on the spec:

 .. code-block:: python

-   def setup_run_environment(self, env):
+   def setup_run_environment(self, env: EnvironmentModifications) -> None:
        if self.spec.satisfies("+foo"):
            env.set("FOO", "bar")

@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this

 .. code-block:: python

-   def setup_dependent_run_environment(self, env, dependent_spec):
+   def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
        if dependent_spec.package.extends(self.spec):
            env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
@@ -557,14 +557,13 @@ preferences.
 FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`

-Most package preferences (``compilers``, ``target`` and ``providers``)
+The ``target`` and ``providers`` preferences
 can only be set globally under the ``all`` section of ``packages.yaml``:

 .. code-block:: yaml

    packages:
      all:
-       compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
        target: [x86_64_v3]
        providers:
          mpi: [mvapich2, mpich, openmpi]
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
 no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
 Examples where bundle packages can be useful include defining suites of
 applications (e.g., `EcpProxyApps
-<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
-(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
-and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).

 These versioned packages primarily consist of dependencies on the associated
 software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:
 .. code-block:: console

    $ spack location -p gmp
-   ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
+   ${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py

 but ``spack edit`` provides a much simpler shortcut and saves you the
 trouble of typing the full path.

@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
 handles creating package files for you, so you can skip most of the
 details here.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-``var/spack/repos/builtin/packages``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+``var/spack/repos/spack_repo/builtin/packages``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 A Spack installation directory is structured like a standard UNIX
 install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
 etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
-Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
+Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.

 If you ``cd`` to that directory, you will see directories for each
 package:

-.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
+.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
    :shell:
    :ellipsis: 10
@@ -479,7 +479,7 @@ package lives in:
 .. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py

 Alongside the ``package.py`` file, a package may contain extra
 directories or files (like patches) that it needs to build.

@@ -492,12 +492,12 @@ Packages are named after the directory containing ``package.py``. So,
 ``libelf``'s ``package.py`` lives in a directory called ``libelf``.
 The ``package.py`` file defines a class called ``Libelf``, which
 extends Spack's ``Package`` class. For example, here is
-``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
+``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:

 .. code-block:: python
    :linenos:

-   from spack import *
+   from spack.package import *

    class Libelf(Package):
        """ ... description ... """

@@ -520,7 +520,7 @@ these:
    $ spack install libelf@0.8.13

 Spack sees the package name in the spec and looks for
-``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
+``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
 Likewise, if you run ``spack install py-numpy``, Spack looks for
 ``py-numpy/package.py``.
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
 In order to handle this, you can define a ``url_for_version()`` function
 like so:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
    :pyobject: Openmpi.url_for_version

 With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``

@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
 takes care of all of the plumbing and requires packagers to just define a proper
 ``gnu_mirror_path`` attribute:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
    :lines: 9-18

 ^^^^^^^^^^^^^^^^^^^^^^^^

@@ -1089,7 +1089,7 @@ You've already seen the ``homepage`` and ``url`` package attributes:
 .. code-block:: python
    :linenos:

-   from spack import *
+   from spack.package import *


    class Mpich(Package):
@@ -1995,7 +1995,7 @@ structure like this:
 .. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
       mvapich2/
          package.py
          ad_lustre_rwcontig_open_source.patch

@@ -2133,7 +2133,7 @@ handles ``RPATH``:

 .. _pyside-patch:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
    :pyobject: PyPyside.patch
    :linenos:

@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::

    $ spack resource show 3877ab54
    3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
       applies to: builtin.m4

 ``spack resource show`` looks up downloadable resources from package

@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::

    ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
    $ spack resource show b37164268
    b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
       applies to: builtin.boost
       patched by: builtin.dealii
|
||||
|
||||
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:
|
||||
|
||||
The Qt package, for instance, uses this call:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
|
||||
:pyobject: Qt.setup_dependent_build_environment
|
||||
:linenos:
|
||||
|
||||
@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
|
||||
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
|
||||
example of this can be found in the ``Python`` package:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
|
||||
:pyobject: Python.setup_dependent_package
|
||||
:linenos:
|
||||
|
||||
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
|
||||
build system specific helper methods or attributes to provide, for instance,
|
||||
configure arguments:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
|
||||
:pyobject: M4.configure_args
|
||||
:linenos:
|
||||
|
||||
@@ -4110,7 +4110,7 @@ Shell command functions
|
||||
|
||||
Recall the install method from ``libelf``:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
|
||||
:pyobject: Libelf.install
|
||||
:linenos:
|
||||
|
||||
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
|
||||
additional properties on it to help you out. E.g., in openmpi, you'll
|
||||
find this:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
|
||||
:pyobject: Openmpi.setup_dependent_package
|
||||
|
||||
That code allows the ``openmpi`` package to associate an ``mpicc`` property
|
||||
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.
|
||||
* - Parent/Provider Package
|
||||
- Stand-alone Tests
|
||||
* - `C
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
|
||||
- Compiles ``hello.c`` and runs it
|
||||
* - `Cxx
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
|
||||
- Compiles and runs several ``hello`` programs
|
||||
* - `Fortran
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
|
||||
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
|
||||
* - `Mpi
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
|
||||
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
|
||||
* - :ref:`PythonPackage <pythonpackage>`
|
||||
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
|
||||
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
|
||||
One example of a package that adds its own stand-alone tests to those
|
||||
"inherited" by the virtual package it provides an implementation for is
|
||||
the `Openmpi package
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.
|
||||
|
||||
Below are snippets from running and viewing the stand-alone test results
|
||||
for ``openmpi``:
|
||||
@@ -6183,7 +6183,7 @@ running:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from spack import *
|
||||
from spack.package import *
|
||||
|
||||
This is already part of the boilerplate for packages created with
|
||||
``spack create``.
|
||||
|
@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
 =================================

 Spack comes with thousands of built-in package recipes in
-``var/spack/repos/builtin/``. This is a **package repository** -- a
+``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
 directory that Spack searches when it needs to find a package by name.
 You may need to maintain packages for restricted, proprietary or
 experimental software separately from the built-in repository. Spack

@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
 .. code-block:: yaml

    repos:
-   - $spack/var/spack/repos/builtin
+   - $spack/var/spack/repos/spack_repo/builtin

 The file starts with ``repos:`` and contains a single ordered list of
 paths to repositories. Each path is on a separate line starting with

@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
 .. code-block:: yaml

    repos:
-   - /opt/local-repo
-   - $spack/var/spack/repos/builtin
+   - /opt/repos/spack_repo/local_repo
+   - $spack/var/spack/repos/spack_repo/builtin

 When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
 it searches these repositories in order (first to last) to resolve each
 package name. In this example, Spack will look for the following
 packages and use the first valid file:

-1. ``/opt/local-repo/packages/mpich/package.py``
-2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
+1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
+2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``

 .. note::

@@ -101,14 +101,15 @@ Namespaces

 Every repository in Spack has an associated **namespace** defined in its
 top-level ``repo.yaml`` file. If you look at
-``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
+``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
 see that its namespace is ``builtin``:

 .. code-block:: console

-   $ cat var/spack/repos/builtin/repo.yaml
+   $ cat var/spack/repos/spack_repo/builtin/repo.yaml
    repo:
      namespace: builtin
+     api: v2.0

 Spack records the repository namespace of each installed package. For
 example, if you install the ``mpich`` package from the ``builtin`` repo,
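Namespaces also show up on installed specs. As a quick illustration (the
output below is abbreviated and illustrative, not captured from a real
install), ``spack find -N`` prefixes each package with the namespace of the
repository it came from:

.. code-block:: console

   $ spack find -N mpich
   -- linux-ubuntu22.04-x86_64 / gcc@12.3.0 ------------------------
   builtin.mpich@4.1.2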
@@ -217,15 +218,15 @@ Suppose you have three repositories: the builtin Spack repo
 repo containing your own prototype packages (``proto``). Suppose they
 contain packages as follows:

-+--------------+------------------------------------+-----------------------------+
-| Namespace    | Path to repo                       | Packages                    |
-+==============+====================================+=============================+
-| ``proto``    | ``~/proto``                        | ``mpich``                   |
-+--------------+------------------------------------+-----------------------------+
-| ``llnl``     | ``/usr/local/llnl``                | ``hdf5``                    |
-+--------------+------------------------------------+-----------------------------+
-| ``builtin``  | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
-+--------------+------------------------------------+-----------------------------+
++--------------+-----------------------------------------------+-----------------------------+
+| Namespace    | Path to repo                                  | Packages                    |
++==============+===============================================+=============================+
+| ``proto``    | ``~/my_spack_repos/spack_repo/proto``         | ``mpich``                   |
++--------------+-----------------------------------------------+-----------------------------+
+| ``llnl``     | ``/usr/local/repos/spack_repo/llnl``          | ``hdf5``                    |
++--------------+-----------------------------------------------+-----------------------------+
+| ``builtin``  | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
++--------------+-----------------------------------------------+-----------------------------+

 Suppose that ``hdf5`` depends on ``mpich``. You can override the
 built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:

@@ -233,8 +234,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
 .. code-block:: yaml

    repos:
-   - /usr/local/llnl
-   - $spack/var/spack/repos/builtin
+   - /usr/local/repos/spack_repo/llnl
+   - $spack/var/spack/repos/spack_repo/builtin

 ``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.

@@ -243,9 +244,9 @@ If, instead, ``repos.yaml`` looks like this:
 .. code-block:: yaml

    repos:
-   - ~/proto
-   - /usr/local/llnl
-   - $spack/var/spack/repos/builtin
+   - ~/my_spack_repos/spack_repo/proto
+   - /usr/local/repos/spack_repo/llnl
+   - $spack/var/spack/repos/spack_repo/builtin

 ``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.

@@ -326,8 +327,8 @@ files, use ``spack repo list``.

    $ spack repo list
    ==> 2 package repositories.
-   myrepo     ~/myrepo
-   builtin    ~/spack/var/spack/repos/builtin
+   myrepo     v2.0    ~/my_spack_repos/spack_repo/myrepo
+   builtin    v2.0    ~/spack/var/spack/repos/spack_repo/builtin

 Each repository is listed with its associated namespace. To get the raw,
 merged YAML from all configuration files, use ``spack config get repos``:

@@ -335,9 +336,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
 .. code-block:: console

    $ spack config get repos
-   repos:
-   - ~/myrepo
-   - $spack/var/spack/repos/builtin
+   repos:
+   - ~/my_spack_repos/spack_repo/myrepo
+   - $spack/var/spack/repos/spack_repo/builtin

 Note that, unlike ``spack repo list``, this does not include the
 namespace, which is read from each repo's ``repo.yaml``.

@@ -351,66 +352,54 @@ yourself; you can use the ``spack repo create`` command.

 .. code-block:: console

-   $ spack repo create myrepo
+   $ spack repo create ~/my_spack_repos myrepo
    ==> Created repo with namespace 'myrepo'.
    ==> To register it with spack, run this command:
-     spack repo add ~/myrepo
+     spack repo add ~/my_spack_repos/spack_repo/myrepo

-   $ ls myrepo
+   $ ls ~/my_spack_repos/spack_repo/myrepo
    packages/  repo.yaml

-   $ cat myrepo/repo.yaml
+   $ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
    repo:
      namespace: 'myrepo'
+     api: v2.0

-By default, the namespace of a new repo matches its directory's name.
-You can supply a custom namespace with a second argument, e.g.:
+Namespaces can also be nested, which can be useful if you have
+multiple package repositories for an organization. Spack will
+create the corresponding directory structure for you:

 .. code-block:: console

-   $ spack repo create myrepo llnl.comp
+   $ spack repo create ~/my_spack_repos llnl.comp
    ==> Created repo with namespace 'llnl.comp'.
    ==> To register it with spack, run this command:
-     spack repo add ~/myrepo
+     spack repo add ~/my_spack_repos/spack_repo/llnl/comp

-   $ cat myrepo/repo.yaml
+   $ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
    repo:
      namespace: 'llnl.comp'
+     api: v2.0

-You can also create repositories with custom structure with the ``-d/--subdirectory``
-argument, e.g.:
-
-.. code-block:: console
-
-   $ spack repo create -d applications myrepo apps
-   ==> Created repo with namespace 'apps'.
-   ==> To register it with Spack, run this command:
-     spack repo add ~/myrepo
-
-   $ ls myrepo
-   applications/  repo.yaml
-
-   $ cat myrepo/repo.yaml
-   repo:
-     namespace: apps
-     subdirectory: applications

 ^^^^^^^^^^^^^^^^^^
 ``spack repo add``
 ^^^^^^^^^^^^^^^^^^

 Once your repository is created, you can register it with Spack with
-``spack repo add``:
+``spack repo add``. You need to specify the path to the directory that
+contains the ``repo.yaml`` file.

 .. code-block:: console

-   $ spack repo add ./myrepo
+   $ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
    ==> Added repo with namespace 'llnl.comp'.

    $ spack repo list
    ==> 2 package repositories.
-   llnl.comp    ~/myrepo
-   builtin      ~/spack/var/spack/repos/builtin
+   llnl.comp    v2.0    ~/my_spack_repos/spack_repo/llnl/comp
+   builtin      v2.0    ~/spack/var/spack/repos/spack_repo/builtin

 This simply adds the repo to your ``repos.yaml`` file.

@@ -432,46 +421,43 @@ By namespace:

 .. code-block:: console

    $ spack repo rm llnl.comp
-   ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.

    $ spack repo list
    ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin

 By path:

 .. code-block:: console

-   $ spack repo rm ~/myrepo
-   ==> Removed repository ~/myrepo
+   $ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp

    $ spack repo list
    ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin

 --------------------------------
 Repo namespaces and Python
 --------------------------------

-You may have noticed that namespace notation for repositories is similar
-to the notation for namespaces in Python. As it turns out, you *can*
-treat Spack repositories like Python packages; this is how they are
-implemented.
+Package repositories are implemented as Python packages. To be precise,
+they are `namespace packages
+<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
+with ``spack_repo`` the top-level namespace, followed by the repository
+namespace as submodules. For example, the builtin repository corresponds
+to the Python module ``spack_repo.builtin.packages``.

-You could, for example, extend a ``builtin`` package in your own
+This structure allows you to extend a ``builtin`` package in your own
 repository:

 .. code-block:: python

-   from spack.pkg.builtin.mpich import Mpich
+   from spack_repo.builtin.packages.mpich.package import Mpich

    class MyPackage(Mpich):
        ...

-Spack repo namespaces are actually Python namespaces tacked on under
-``spack.pkg``. The search semantics of ``repos.yaml`` are actually
-implemented using Python's built-in `sys.path
-<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
-:py:mod:`spack.repo` module implements a custom `Python importer
-<https://docs.python.org/2/library/imp.html>`_.
+Spack populates ``sys.path`` at runtime with the path to the root of your
+package repository's ``spack_repo`` directory.
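To make the namespace-package layout concrete, the ``myrepo`` example from
earlier in this file would sit on disk roughly like this (sketch only; the
``my_package`` recipe directory is hypothetical):

.. code-block:: none

   ~/my_spack_repos/                 <- path registered in repos.yaml
      spack_repo/                    <- top-level Python namespace package
         myrepo/                     <- repository namespace
            repo.yaml
            packages/
               my_package/
                  package.py         <- spack_repo.myrepo.packages.my_package.package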
@@ -5,9 +5,9 @@ sphinx-rtd-theme==3.0.2
 python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
-urllib3==2.3.0
+urllib3==2.4.0
 pytest==8.3.5
 isort==6.0.1
 black==25.1.0
-flake8==7.1.2
+flake8==7.2.0
 mypy==1.11.1
@@ -176,92 +176,72 @@ community without needing deep familiarity with GnuPG or Public Key
 Infrastructure.


-.. _build_cache_format:
+.. _build_cache_signing:

-------------------
-Build Cache Format
-------------------
+-------------------
+Build Cache Signing
+-------------------

-A binary package consists of a metadata file unambiguously defining the
-built package (and including other details such as how to relocate it)
-and the installation directory of the package stored as a compressed
-archive file. The metadata files can either be unsigned, in which case
-the contents are simply the json-serialized concrete spec plus metadata,
-or they can be signed, in which case the json-serialized concrete spec
-plus metadata is wrapped in a gpg cleartext signature. Built package
-metadata files are named to indicate the operating system and
-architecture for which the package was built as well as the compiler
-used to build it and the packages name and version. For example::
-
-    linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
-
-would contain the concrete spec and binary metadata for a binary package
-of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
-architecture. The id of the built package exists in the name of the file
-as well (after the package name and version) and in this case begins
-with ``llv2ys``. The id distinguishes a particular built package from all
-other built packages with the same os/arch, compiler, name, and version.
-Below is an example of a signed binary package metadata file. Such a
-file would live in the ``build_cache`` directory of a binary mirror::
+For an in-depth description of the layout of a binary mirror, see
+the :ref:`documentation<build_cache_layout>` covering binary caches. The
+key takeaway from that discussion that applies here is that the entry point
+to a binary package is its manifest. The manifest refers unambiguously to the
+spec metadata and compressed archive, which are stored as content-addressed
+blobs.
+
+The manifest files can either be signed or unsigned, but are always given
+a name ending with ``.spec.manifest.json`` regardless. The difference between
+signed and unsigned manifests is simply that the signed version is wrapped in
+a gpg cleartext signature, as illustrated below::

     -----BEGIN PGP SIGNED MESSAGE-----
     Hash: SHA512

     {
-      "spec": {
-        <concrete-spec-contents-omitted>
-      },
-
-      "buildcache_layout_version": 1,
-      "binary_cache_checksum": {
-        "hash_algorithm": "sha256",
-        "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
-      }
+      "version": 3,
+      "data": [
+        {
+          "contentLength": 10731083,
+          "mediaType": "application/vnd.spack.install.v2.tar+gzip",
+          "compression": "gzip",
+          "checksumAlgorithm": "sha256",
+          "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
+        },
+        {
+          "contentLength": 1000,
+          "mediaType": "application/vnd.spack.spec.v5+json",
+          "compression": "gzip",
+          "checksumAlgorithm": "sha256",
+          "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
+        }
+      ]
     }

     -----BEGIN PGP SIGNATURE-----

-    iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
-    ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
-    4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
-    QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
-    887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
-    4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
-    qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
-    QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
-    Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
-    j3DXty57
-    =3gvm
+    iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8
+    mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU
+    HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL
+    W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD
+    5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD
+    ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6
+    bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n
+    HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3
+    gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr
+    XD4aDogm
+    =RrFX
     -----END PGP SIGNATURE-----

 If a user has trusted the public key associated with the private key
-used to sign the above spec file, the signature can be verified with
+used to sign the above manifest file, the signature can be verified with
 gpg, as follows::

-    $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
+    $ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json

-The metadata (regardless whether signed or unsigned) contains the checksum
-of the ``.spack`` file containing the actual installation. The checksum should
-be compared to a checksum computed locally on the ``.spack`` file to ensure the
-contents have not changed since the binary spec plus metadata were signed. The
-``.spack`` files are actually tarballs containing the compressed archive of the
-install tree. These files, along with the metadata files, live within the
-``build_cache`` directory of the mirror, and together are organized as follows::
-
-    build_cache/
-        # unsigned metadata (for indexing, contains sha256 of .spack file)
-        <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
-        # clearsigned metadata (same as above, but signed)
-        <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
-        <arch>/
-            <compiler>/
-                <name>-<ver>/
-                    # tar.gz-compressed prefix (may support more compression formats later)
-                    <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
-
-Uncompressing and extracting the ``.spack`` file results in the install tree.
-This is in contrast to previous versions of spack, where the ``.spack`` file
-contained a (duplicated) metadata file, a signature file and a nested tarball
-containing the install tree.
+When attempting to install a binary package that has been signed, spack will
+attempt to verify the signature with one of the trusted keys in its keyring,
+and will fail if unable to do so. While not recommended, it is possible to
+force installation of a signed package without verification by providing the
+``--no-check-signature`` argument to ``spack install ...``.

 .. _internal_implementation:

@@ -320,10 +300,10 @@ the following way:
    Reputational Public Key are imported into a keyring by the ``spack gpg …``
    sub-command. This is initiated by the job’s build script which is created by
    the generate job at the beginning of the pipeline.
-4. Assuming the package has dependencies those specs are verified using
+4. Assuming the package has dependencies those spec manifests are verified using
    the keyring.
-5. The package is built and the spec.json is generated
-6. The spec.json is signed by the keyring and uploaded to the mirror’s
+5. The package is built and the spec manifest is generated
+6. The spec manifest is signed by the keyring and uploaded to the mirror’s
    build cache.

 **Reputational Key**

@@ -376,24 +356,24 @@ following way:
 4. In addition to the secret, the runner creates a tmpfs memory mounted
    directory where the GnuPG keyring will be created to verify, and
    then resign the package specs.
-5. The job script syncs all spec.json.sig files from the build cache to
+5. The job script syncs all spec manifest files from the build cache to
    a working directory in the job’s execution environment.
 6. The job script then runs the ``sign.sh`` script built into the
    notary Docker image.
 7. The ``sign.sh`` script imports the public components of the
    Reputational and Intermediate CI Keys and uses them to verify good
-   signatures on the spec.json.sig files. If any signed spec does not
-   verify the job immediately fails.
-8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
-   the spec json data from the signed file in preparation for being
+   signatures on the spec.manifest.json files. If any signed manifest
+   does not verify, the job immediately fails.
+8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks
+   the manifest json data from the signed file in preparation for being
    re-signed with the Reputational Key.
 9. The private components of the Reputational Key are decrypted to
    standard out using ``aws-encryption-cli`` directly into a ``gpg
    --import …`` statement which imports the key into the
    keyring mounted in-memory.
-10. The private key is then used to sign each of the json specs and the
+10. The private key is then used to sign each of the manifests and the
    keyring is removed from disk.
-11. The re-signed json specs are resynced to the AWS S3 Mirror and the
+11. The re-signed manifests are resynced to the AWS S3 Mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.
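The signature establishes trust in a manifest; the integrity of each
referenced blob is then established by its checksum. A minimal sketch of that
check in Python (``verify_blob`` and both file paths are hypothetical, and an
unsigned manifest is assumed so that the file is plain JSON):

.. code-block:: python

   import hashlib
   import json

   def verify_blob(manifest_path: str, blob_path: str, index: int = 0) -> bool:
       """Compare a stored blob's sha256 with the checksum in the manifest."""
       with open(manifest_path, encoding="utf-8") as f:
           entry = json.load(f)["data"][index]
       assert entry["checksumAlgorithm"] == "sha256"
       digest = hashlib.sha256()
       with open(blob_path, "rb") as blob:
           # The checksum covers the blob as stored (i.e., still compressed).
           for chunk in iter(lambda: blob.read(1 << 20), b""):
               digest.update(chunk)
       return digest.hexdigest() == entry["checksum"]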
13  lib/spack/external/__init__.py  (vendored)
@@ -11,6 +11,7 @@
 * Homepage: https://altgraph.readthedocs.io/en/latest/index.html
 * Usage: dependency of macholib
 * Version: 0.17.3
+* License: MIT

 archspec
 --------
@@ -18,6 +19,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
 * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
+* License: Apache-2.0 or MIT

 astunparse
 ----------------
@@ -25,6 +27,7 @@
 * Homepage: https://github.com/simonpercivall/astunparse
 * Usage: Unparsing Python ASTs for package hashes in Spack
 * Version: 1.6.3 (plus modifications)
+* License: PSF-2.0
 * Note: This is in ``spack.util.unparse`` because it's very heavily
   modified, and we want to track coverage for it.
   Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -41,6 +44,7 @@
 * Homepage: https://github.com/python-attrs/attrs
 * Usage: Needed by jsonschema.
 * Version: 22.1.0
+* License: MIT

 ctest_log_parser
 ----------------
@@ -48,6 +52,7 @@
 * Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
 * Usage: Functions to parse build logs and extract error messages.
 * Version: Unversioned
+* License: BSD-3-Clause
 * Note: This is a homemade port of Kitware's CTest build handler.

 distro
@@ -56,6 +61,7 @@
 * Homepage: https://pypi.python.org/pypi/distro
 * Usage: Provides a more stable linux distribution detection.
 * Version: 1.8.0
+* License: Apache-2.0

 jinja2
 ------
@@ -63,6 +69,7 @@
 * Homepage: https://pypi.python.org/pypi/Jinja2
 * Usage: A modern and designer-friendly templating language for Python.
 * Version: 3.0.3 (last version supporting Python 3.6)
+* License: BSD-3-Clause

 jsonschema
 ----------
@@ -70,6 +77,7 @@
 * Homepage: https://pypi.python.org/pypi/jsonschema
 * Usage: An implementation of JSON Schema for Python.
 * Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
+* License: MIT
 * Note: We don't include tests or benchmarks; just what Spack needs.

 macholib
@@ -78,6 +86,7 @@
 * Homepage: https://macholib.readthedocs.io/en/latest/index.html#
 * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
 * Version: 1.16.2
+* License: MIT

 markupsafe
 ----------
@@ -85,6 +94,7 @@
 * Homepage: https://pypi.python.org/pypi/MarkupSafe
 * Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
 * Version: 2.0.1 (last version supporting Python 3.6)
+* License: BSD-3-Clause

 pyrsistent
 ----------
@@ -92,6 +102,7 @@
 * Homepage: http://github.com/tobgu/pyrsistent/
 * Usage: Needed by `jsonschema`
 * Version: 0.18.0
+* License: MIT

 ruamel.yaml
 ------
@@ -101,6 +112,7 @@
   actively maintained and has more features, including round-tripping
   comments read from config files.
 * Version: 0.17.21
+* License: MIT

 six
 ---
@@ -108,5 +120,6 @@
 * Homepage: https://pypi.python.org/pypi/six
 * Usage: Python 2 and 3 compatibility utilities.
 * Version: 1.16.0
+* License: MIT

 """
@@ -764,7 +764,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise OSError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all

@@ -15,7 +15,20 @@
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+)

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -424,46 +437,39 @@ def add_func_to_class(name, func):
     return cls


+K = TypeVar("K")
+V = TypeVar("V")
+
+
 @lazy_lexicographic_ordering
-class HashableMap(collections.abc.MutableMapping):
+class HashableMap(typing.MutableMapping[K, V]):
     """This is a hashable, comparable dictionary. Hash is performed on
     a tuple of the values in the dictionary."""

     __slots__ = ("dict",)

     def __init__(self):
-        self.dict = {}
+        self.dict: Dict[K, V] = {}

-    def __getitem__(self, key):
+    def __getitem__(self, key: K) -> V:
         return self.dict[key]

-    def __setitem__(self, key, value):
+    def __setitem__(self, key: K, value: V) -> None:
         self.dict[key] = value

-    def __iter__(self):
+    def __iter__(self) -> Iterator[K]:
         return iter(self.dict)

-    def __len__(self):
+    def __len__(self) -> int:
         return len(self.dict)

-    def __delitem__(self, key):
+    def __delitem__(self, key: K) -> None:
         del self.dict[key]

     def _cmp_iter(self):
         for _, v in sorted(self.items()):
             yield v

-    def copy(self):
-        """Type-agnostic clone method. Preserves subclass type."""
-        # Construct a new dict of my type
-        self_type = type(self)
-        clone = self_type()
-
-        # Copy everything from this dict into it.
-        for key in self:
-            clone[key] = self[key].copy()
-        return clone


 def match_predicate(*args):
     """Utility function for making string matching predicates.
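A short sketch of what the new type parameters buy for code built on
``HashableMap`` (illustrative subclass; the commented line shows what a type
checker would now reject):

.. code-block:: python

   class CountMap(HashableMap[str, int]):
       """Maps names to counts; keys and values are now checked by mypy."""

   cm = CountMap()
   cm["zlib"] = 3  # ok: str -> int
   # cm[42] = "oops"  # would be flagged by mypy: wrong key and value types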
@@ -1047,19 +1053,28 @@ def __exit__(self, exc_type, exc_value, tb):
         return True


-class classproperty:
+ClassPropertyType = TypeVar("ClassPropertyType")
+
+
+class classproperty(Generic[ClassPropertyType]):
     """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and take an instance (could be None) and
-    an owner (i.e. the class that originated the instance)
+    the evaluation is injected at creation time and takes an owner (i.e., the class that
+    originated the instance).
     """

-    def __init__(self, callback):
+    def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
         self.callback = callback

-    def __get__(self, instance, owner):
+    def __get__(self, instance, owner) -> ClassPropertyType:
         return self.callback(owner)


+#: A type alias that represents either a classproperty descriptor or a constant value of the same
+#: type. This allows derived classes to override a computed class-level property with a constant
+#: value while retaining type compatibility.
+ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]
+
+
 class DeprecatedProperty:
     """Data descriptor to error or warn when a deprecated property is accessed.
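Given the definitions above, the intent of the ``ClassProperty`` alias can be
shown in a few lines (names are illustrative):

.. code-block:: python

   def _default_label(cls) -> str:
       # Computed lazily from the owning class by the descriptor.
       return cls.__name__.lower()

   class Base:
       label: ClassProperty[str] = classproperty(_default_label)

   class Child(Base):
       # A plain constant still satisfies the declared type, thanks to the
       # Union in ClassProperty.
       label = "custom-label"

   assert Base.label == "base" and Child.label == "custom-label"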
@@ -18,7 +18,7 @@
 #: version is incremented when the package API is extended in a backwards-compatible way. The major
 #: version is incremented upon breaking changes. This version is changed independently from the
 #: Spack version.
-package_api_version = (1, 0)
+package_api_version = (2, 0)

 #: The minimum Package API version that this version of Spack is compatible with. This should
 #: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies

@@ -7,7 +7,7 @@
     "llvm": "clang",
     "intel-oneapi-compilers": "oneapi",
     "llvm-amdgpu": "rocmcc",
-    "intel-oneapi-compiler-classic": "intel",
+    "intel-oneapi-compilers-classic": "intel",
     "acfl": "arm",
 }

@@ -15,6 +15,6 @@
     "clang": "llvm",
     "oneapi": "intel-oneapi-compilers",
     "rocmcc": "llvm-amdgpu",
-    "intel": "intel-oneapi-compiler-classic",
+    "intel": "intel-oneapi-compilers-classic",
     "arm": "acfl",
 }

@@ -350,7 +350,7 @@ def _ensure_no_folders_without_package_py(error_cls):
     for repository in spack.repo.PATH.repos:
         missing = []
         for entry in os.scandir(repository.packages_path):
-            if not entry.is_dir():
+            if not entry.is_dir() or entry.name == "__pycache__":
                 continue
             package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
             if not package_py.exists():
(File diff suppressed because it is too large.)
@@ -133,7 +133,7 @@ def mypy_root_spec() -> str:

 def black_root_spec() -> str:
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:24.1.0")
+    return _root_spec("py-black@:25.1.0")


 def flake8_root_spec() -> str:

@@ -36,9 +36,11 @@
 import multiprocessing
 import os
 import re
+import signal
 import sys
 import traceback
 import types
+import warnings
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain

@@ -572,12 +574,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
     module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
     module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
-    # TODO: johnwparent: add package or builder support to define these build tools
-    # for now there is no entrypoint for builders to define these on their
-    # own

     if sys.platform == "win32":
-        module.nmake = Executable("nmake")
-        module.msbuild = Executable("msbuild")
+        module.nmake = DeprecatedExecutable(pkg.name, "nmake", "msvc")
+        module.msbuild = DeprecatedExecutable(pkg.name, "msbuild", "msvc")
         # analog to configure for win32
         module.cscript = Executable("cscript")

@@ -1189,11 +1189,9 @@ def _setup_pkg_and_run(
         if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
-                "The '{}' package cannot find an attribute while trying to {}. "
-                "This might be due to a change in Spack's package format "
-                "to support multiple build-systems for a single package. You can fix this "
-                "by updating the {} recipe, and you can also report the issue as a bug. "
-                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
+                "The '{}' package cannot find an attribute while trying to {}. You can fix this "
+                "by updating the {} recipe, and you can also report the issue as a build-error or "
+                "a bug at https://github.com/spack/spack/issues"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
             error_msg = "{}\n\n{}".format(str(e), error_msg)

@@ -1218,15 +1216,45 @@
             input_pipe.close()


-def start_build_process(pkg, function, kwargs):
+class BuildProcess:
+    def __init__(self, *, target, args) -> None:
+        self.p = multiprocessing.Process(target=target, args=args)
+
+    def start(self) -> None:
+        self.p.start()
+
+    def is_alive(self) -> bool:
+        return self.p.is_alive()
+
+    def join(self, *, timeout: Optional[int] = None):
+        self.p.join(timeout=timeout)
+
+    def terminate(self):
+        # Opportunity for graceful termination
+        self.p.terminate()
+        self.p.join(timeout=1)
+
+        # If the process didn't gracefully terminate, forcefully kill
+        if self.p.is_alive():
+            # TODO (python 3.6 removal): use self.p.kill() instead, consider removing this class
+            assert isinstance(self.p.pid, int), f"unexpected value for PID: {self.p.pid}"
+            os.kill(self.p.pid, signal.SIGKILL)
+        self.p.join()
+
+    @property
+    def exitcode(self):
+        return self.p.exitcode
+
+
+def start_build_process(pkg, function, kwargs, *, timeout: Optional[int] = None):
     """Create a child process to do part of a spack build.

     Args:

         pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
-        function (typing.Callable): argless function to run in the child
-            process.
+        function (typing.Callable): argless function to run in the child process.
+        timeout: maximum time allowed to finish the execution of function

     Usage::

@@ -1254,14 +1282,14 @@ def child_fun():
     # Forward sys.stdin when appropriate, to allow toggling verbosity
     if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
         input_fd = Connection(os.dup(sys.stdin.fileno()))
-    mflags = os.environ.get("MAKEFLAGS", False)
-    if mflags:
+    mflags = os.environ.get("MAKEFLAGS")
+    if mflags is not None:
         m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
         if m:
             jobserver_fd1 = Connection(int(m.group(1)))
             jobserver_fd2 = Connection(int(m.group(2)))

-    p = multiprocessing.Process(
+    p = BuildProcess(
         target=_setup_pkg_and_run,
         args=(
             serialized_pkg,

@@ -1295,14 +1323,17 @@ def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
         return f"{typ} {abs(p.exitcode)}"

-    p.join()
+    p.join(timeout=timeout)
+    if p.is_alive():
+        warnings.warn(f"Terminating process, since the timeout of {timeout}s was exceeded")
+        p.terminate()
+        p.join()

     try:
         child_result = read_pipe.recv()
     except EOFError:
         p.join()
         raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")

     p.join()

     # If returns a StopPhase, raise it
     if isinstance(child_result, spack.error.StopPhase):
         # do not print
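The terminate-then-kill pattern above can be exercised outside Spack with
plain ``multiprocessing`` (self-contained sketch, not Spack API):

.. code-block:: python

   import multiprocessing
   import time

   def slow_task():
       time.sleep(60)  # stands in for a build that exceeds its timeout

   if __name__ == "__main__":
       p = multiprocessing.Process(target=slow_task)
       p.start()
       p.join(timeout=1)   # give the task one second to finish
       if p.is_alive():    # timed out: ask politely first...
           p.terminate()   # SIGTERM, lets cleanup handlers run
           p.join(timeout=1)
       if p.is_alive():    # ...then force if it is still running
           p.kill()        # SIGKILL equivalent, Python >= 3.7
           p.join()
       print("exitcode:", p.exitcode)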
@@ -16,6 +16,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

@@ -846,7 +847,9 @@ def _remove_libtool_archives(self) -> None:
         with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
             f.write("\n".join(libtool_files))

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         if self.spec.platform == "darwin" and macos_version() >= Version("11"):
             # Many configure files rely on matching '10.*' for macOS version
             # detection and fail to add flags if it shows as version 11.
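Recipes that override this hook use the same typed signature. A minimal
sketch (hypothetical package, assuming the usual ``from spack.package
import *`` boilerplate):

.. code-block:: python

   import spack.util.environment

   class Mylib(AutotoolsPackage):
       """Example recipe using the now-typed build-environment hook."""

       def setup_build_environment(
           self, env: spack.util.environment.EnvironmentModifications
       ) -> None:
           # Modifications are recorded here and applied by Spack in the
           # build process.
           env.set("MYLIB_CACHE", self.stage.path)
           env.append_flags("CFLAGS", "-fno-fast-math")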
@@ -2,9 +2,10 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
+import enum
 import os
 import re
-from typing import Tuple
+from typing import Optional, Tuple

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

@@ -13,6 +14,7 @@
 import spack.spec
 import spack.util.prefix
 from spack.directives import depends_on
+from spack.util.executable import which_string

 from .cmake import CMakeBuilder, CMakePackage

@@ -178,6 +180,64 @@ def initconfig_compiler_entries(self):

         return entries

+    class Scheduler(enum.Enum):
+        LSF = enum.auto()
+        SLURM = enum.auto()
+        FLUX = enum.auto()
+
+    def get_scheduler(self) -> Optional[Scheduler]:
+        spec = self.pkg.spec
+
+        # Check for Spectrum-mpi, which always uses LSF or LSF MPI variant
+        if spec.satisfies("^spectrum-mpi") or spec["mpi"].satisfies("schedulers=lsf"):
+            return self.Scheduler.LSF
+
+        # Check for Slurm MPI variants
+        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
+        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
+            return self.Scheduler.SLURM
+
+        # TODO improve this when MPI implementations support flux
+        # Do this check last to avoid using a flux wrapper present next to Slurm/ LSF schedulers
+        if which_string("flux") is not None:
+            return self.Scheduler.FLUX
+
+        return None
+
+    def get_mpi_exec(self) -> Optional[str]:
+        spec = self.pkg.spec
+        scheduler = self.get_scheduler()
+
+        if scheduler == self.Scheduler.LSF:
+            return which_string("lrun")
+
+        elif scheduler == self.Scheduler.SLURM:
+            if spec["mpi"].external:
+                return which_string("srun")
+            else:
+                return os.path.join(spec["slurm"].prefix.bin, "srun")
+
+        elif scheduler == self.Scheduler.FLUX:
+            flux = which_string("flux")
+            return f"{flux};run" if flux else None
+
+        elif hasattr(spec["mpi"].package, "mpiexec"):
+            return spec["mpi"].package.mpiexec
+
+        else:
+            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
+            if not os.path.exists(mpiexec):
+                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
+            return mpiexec
+
+    def get_mpi_exec_num_proc(self) -> str:
+        scheduler = self.get_scheduler()
+
+        if scheduler in [self.Scheduler.FLUX, self.Scheduler.LSF, self.Scheduler.SLURM]:
+            return "-n"
+        else:
+            return "-np"
+
     def initconfig_mpi_entries(self):
         spec = self.pkg.spec
@@ -197,27 +257,10 @@ def initconfig_mpi_entries(self):
         if hasattr(spec["mpi"], "mpifc"):
             entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

-        # Check for slurm
-        using_slurm = False
-        slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
-        if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
-            using_slurm = True
-
         # Determine MPIEXEC
-        if using_slurm:
-            if spec["mpi"].external:
-                # Heuristic until we have dependents on externals
-                mpiexec = "/usr/bin/srun"
-            else:
-                mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
-        elif hasattr(spec["mpi"].package, "mpiexec"):
-            mpiexec = spec["mpi"].package.mpiexec
-        else:
-            mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
-            if not os.path.exists(mpiexec):
-                mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
+        mpiexec = self.get_mpi_exec()

-        if not os.path.exists(mpiexec):
+        if mpiexec is None or not os.path.exists(mpiexec.split(";")[0]):
             msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
             entries.append("# {0}\n".format(msg))
             tty.warn(msg)

@@ -230,10 +273,7 @@ def initconfig_mpi_entries(self):
             entries.append(cmake_cache_path("MPIEXEC", mpiexec))

         # Determine MPIEXEC_NUMPROC_FLAG
-        if using_slurm:
-            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-n"))
-        else:
-            entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", "-np"))
+        entries.append(cmake_cache_string("MPIEXEC_NUMPROC_FLAG", self.get_mpi_exec_num_proc()))

         return entries

@@ -276,30 +316,18 @@ def initconfig_hardware_entries(self):
             entries.append("# ROCm")
             entries.append("#------------------{0}\n".format("-" * 30))

-            if spec.satisfies("^blt@0.7:"):
-                rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
-                entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
-            else:
-                # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
-                entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-                llvm_bin = spec["llvm-amdgpu"].prefix.bin
-                llvm_prefix = spec["llvm-amdgpu"].prefix
-                # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
-                # others point to /<path>/rocm-<ver>/llvm
-                if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
-                    llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
-                entries.append(
-                    cmake_cache_filepath(
-                        "CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
-                    )
-                )
+            rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
+            entries.append(cmake_cache_path("ROCM_PATH", rocm_root))

             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
                 entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
-                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+
+            llvm_bin = spec["llvm-amdgpu"].prefix.bin
+            entries.append(
+                cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++"))
+            )

         if spec.satisfies("%gcc"):
             entries.append(

@@ -308,6 +336,15 @@ def initconfig_hardware_entries(self):
                 )
             )

+        # Extra definitions that might be required in other cases
+        if not spec.satisfies("^blt"):
+            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
+
+            if archs[0] != "none":
+                arch_str = ";".join(archs)
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
+
         return entries

     def std_initconfig_entries(self):

@@ -8,6 +8,7 @@
 import spack.package_base
 import spack.phase_callbacks
 import spack.spec
+import spack.util.environment
 import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

@@ -86,7 +87,9 @@ def check_args(self):
         """Argument for ``cargo test`` during check phase"""
         return []

-    def setup_build_environment(self, env):
+    def setup_build_environment(
+        self, env: spack.util.environment.EnvironmentModifications
+    ) -> None:
         env.set("CARGO_HOME", self.stage.path)

     def build(
@@ -36,7 +36,7 @@ class CompilerPackage(spack.package_base.PackageBase):

     #: Compiler argument(s) that produces version information
     #: If multiple arguments, the earlier arguments must produce errors when invalid
-    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
+    compiler_version_argument: Union[str, Tuple[str, ...]] = "-dumpversion"

     #: Regex used to extract version from compiler's output
     compiler_version_regex: str = "(.*)"

@@ -47,6 +47,11 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Relative path to compiler wrappers
     compiler_wrapper_link_paths: Dict[str, str] = {}

+    #: Optimization flags
+    opt_flags: Sequence[str] = []
+    #: Flags for generating debug information
+    debug_flags: Sequence[str] = []
+
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
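A sketch of how a compiler recipe might populate the new attributes
(hypothetical package, flag values loosely modeled on GCC's; assumes the
usual recipe boilerplate):

.. code-block:: python

   class Mycc(Package, CompilerPackage):
       """Example compiler package advertising its flag vocabulary."""

       compiler_version_argument = ("--version", "-dumpversion")  # tried in order
       compiler_version_regex = r"(\d+\.\d+\.\d+)"
       opt_flags = ["-O0", "-O1", "-O2", "-O3", "-Os"]
       debug_flags = ["-g"]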
@@ -8,6 +8,7 @@
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
@@ -68,7 +69,9 @@ class GoBuilder(BuilderWithDefaults):
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["check"]
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
env.set("GO111MODULE", "on")
|
||||
env.set("GOTOOLCHAIN", "local")
|
||||
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
|
||||
|
@@ -23,6 +23,7 @@
|
||||
|
||||
import spack.error
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.error import InstallError
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
@@ -1016,7 +1017,7 @@ def libs(self):
|
||||
debug_print(result)
|
||||
return result
|
||||
|
||||
def setup_run_environment(self, env):
|
||||
def setup_run_environment(self, env: EnvironmentModifications) -> None:
|
||||
"""Adds environment variables to the generated module file.
|
||||
|
||||
These environment variables come from running:
|
||||
@@ -1049,11 +1050,13 @@ def setup_run_environment(self, env):
|
||||
env.set("F77", self.prefix.bin.ifort)
|
||||
env.set("F90", self.prefix.bin.ifort)
|
||||
|
||||
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||
def setup_dependent_build_environment(
|
||||
self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
|
||||
) -> None:
|
||||
# NB: This function is overwritten by 'mpi' provider packages:
|
||||
#
|
||||
# var/spack/repos/builtin/packages/intel-mpi/package.py
|
||||
# var/spack/repos/builtin/packages/intel-parallel-studio/package.py
|
||||
# var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
|
||||
# var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
|
||||
#
|
||||
# They call _setup_dependent_env_callback() as well, but with the
|
||||
# dictionary kwarg compilers_of_client{} present and populated.
|
||||
@@ -1061,7 +1064,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
|
||||
# Handle everything in a callback version.
|
||||
self._setup_dependent_env_callback(env, dependent_spec)
|
||||
|
||||
def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
|
||||
def _setup_dependent_env_callback(
|
||||
self,
|
||||
env: EnvironmentModifications,
|
||||
dependent_spec: spack.spec.Spec,
|
||||
compilers_of_client={},
|
||||
) -> None:
|
||||
# Expected to be called from a client's
|
||||
# setup_dependent_build_environment(),
|
||||
# with args extended to convey the client's compilers as needed.
|
||||
|
@@ -8,6 +8,7 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
@@ -114,5 +115,7 @@ def install(
|
||||
def _luarocks_config_path(self):
|
||||
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
|
||||
|
@@ -4,6 +4,7 @@
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, extends
|
||||
from spack.multimethod import when
|
||||
@@ -57,7 +58,9 @@ def install(
|
||||
"pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
|
||||
)
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
# octave does not like those environment variables to be set:
|
||||
env.unset("CC")
|
||||
env.unset("CXX")
|
||||
|
@@ -106,8 +106,8 @@ def install_component(self, installer_path):

         bash = Executable("bash")

-        # Installer writes files in ~/intel set HOME so it goes to prefix
-        bash.add_default_env("HOME", self.prefix)
+        # Installer writes files in ~/intel set HOME so it goes to staging directory
+        bash.add_default_env("HOME", join_path(self.stage.path, "home"))
         # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
         bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))

@@ -132,7 +132,7 @@ def install_component(self, installer_path):
         if not isdir(install_dir):
             raise RuntimeError("install failed to directory: {0}".format(install_dir))

-    def setup_run_environment(self, env):
+    def setup_run_environment(self, env: EnvironmentModifications) -> None:
         """Adds environment variables to the generated module file.

         These environment variables come from running:

@@ -311,4 +311,4 @@ def ld_flags(self):


 #: Tuple of Intel math libraries, exported to packages
-INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")
+INTEL_MATH_LIBRARIES = ("intel-oneapi-mkl",)
@@ -13,9 +13,9 @@
 import archspec

 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import HeaderList, LibraryList, join_path
+from llnl.util.lang import ClassProperty, classproperty, match_predicate

 import spack.builder
 import spack.config

@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
         ext_map = view.extensions_layout.extension_map(self.extendee_spec)
         namespaces = set(x.package.py_namespace for x in ext_map.values())
         namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
-        find_namespace = lang.match_predicate(namespace_re)
+        find_namespace = match_predicate(namespace_re)
         if self.py_namespace in namespaces:
             conflicts = list(x for x in conflicts if not find_namespace(x))

@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
             spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
         )
         if self.py_namespace in remaining_namespaces:
-            namespace_init = lang.match_predicate(
+            namespace_init = match_predicate(
                 r"site-packages/{0}/__init__.py".format(self.py_namespace)
             )
             ignore_namespace = True

@@ -324,6 +324,27 @@ def get_external_python_for_prefix(self):
         raise StopIteration("No external python could be detected for %s to depend on" % self.spec)


+def _homepage(cls: "PythonPackage") -> Optional[str]:
+    """Get the homepage from PyPI if available."""
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/project/{name}/"
+    return None
+
+
+def _url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
+    return None
+
+
+def _list_url(cls: "PythonPackage") -> Optional[str]:
+    if cls.pypi:
+        name = cls.pypi.split("/")[0]
+        return f"https://pypi.org/simple/{name}/"
+    return None
+
+
 class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""

@@ -351,25 +372,9 @@ class PythonPackage(PythonExtension):

     py_namespace: Optional[str] = None

-    @lang.classproperty
-    def homepage(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/project/{name}/"
-        return None
-
-    @lang.classproperty
-    def url(cls) -> Optional[str]:
-        if cls.pypi:
-            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
-        return None
-
-    @lang.classproperty
-    def list_url(cls) -> Optional[str]:  # type: ignore[override]
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return f"https://pypi.org/simple/{name}/"
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)

     @property
     def python_spec(self) -> Spec:
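The PythonPackage hunk replaces `@lang.classproperty`-decorated methods with module-level functions wired up through `classproperty(...)` assignments, which gives type checkers an explicit `ClassProperty[...]` annotation to work with. A minimal sketch of how such a descriptor behaves; the real implementation lives in `llnl.util.lang`, and everything below (including `DemoPackage` and its `pypi` value) is illustrative only:

```python
from typing import Callable, Generic, Optional, TypeVar

T = TypeVar("T")


class ClassProperty(Generic[T]):
    """Non-data descriptor that computes a value from the owning class."""

    def __init__(self, callback: Callable[[type], T]) -> None:
        self.callback = callback

    def __get__(self, instance, owner) -> T:
        return self.callback(owner)


classproperty = ClassProperty  # factory alias, mirroring the usage above


def _homepage(cls) -> Optional[str]:
    return f"https://pypi.org/project/{cls.pypi.split('/')[0]}/" if cls.pypi else None


class DemoPackage:
    pypi = "flake8/flake8-3.8.2.tar.gz"
    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)


# Accessing the descriptor on the class (not an instance) calls __get__
# with owner=DemoPackage, so the URL is computed per concrete package class.
assert DemoPackage.homepage == "https://pypi.org/project/flake8/"
```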
@@ -3,8 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from typing import Optional, Tuple

-import llnl.util.lang as lang
 from llnl.util.filesystem import mkdirp
+from llnl.util.lang import ClassProperty, classproperty

 from spack.directives import extends

@@ -54,6 +54,32 @@ def install(self, pkg, spec, prefix):
         pkg.module.R(*args)


+def _homepage(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/package={cls.cran}"
+    elif cls.bioc:
+        return f"https://bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
+def _url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+    return None
+
+
+def _list_url(cls: "RPackage") -> Optional[str]:
+    if cls.cran:
+        return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+    return None
+
+
+def _git(cls: "RPackage") -> Optional[str]:
+    if cls.bioc:
+        return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    return None
+
+
 class RPackage(Package):
     """Specialized class for packages that are built using R.

@@ -77,24 +103,7 @@ class RPackage(Package):

     extends("r")

-    @lang.classproperty
-    def homepage(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/package={cls.cran}"
-        elif cls.bioc:
-            return f"https://bioconductor.org/packages/{cls.bioc}"
-
-    @lang.classproperty
-    def url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
-
-    @lang.classproperty
-    def list_url(cls):
-        if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
-
-    @lang.classproperty
-    def git(cls):
-        if cls.bioc:
-            return f"https://git.bioconductor.org/packages/{cls.bioc}"
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
+    url: ClassProperty[Optional[str]] = classproperty(_url)
+    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
+    git: ClassProperty[Optional[str]] = classproperty(_git)
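One subtlety in `_url` above: `list(cls.versions)[0]` takes the first key of the package's version mapping, so the generated CRAN URL points at whichever version was registered first. A toy evaluation of that expression, with a made-up package and version table:

```python
# Hypothetical stand-ins; Spack's real `versions` is a dict-like directory
# populated by version() directives, checked here only for the key order.
versions = {"3.5.2": "sha256-aaaa", "3.5.1": "sha256-bbbb"}
cran = "ggplot2"

# list(dict) yields keys in insertion order, so index 0 is the first entry
url = f"https://cloud.r-project.org/src/contrib/{cran}_{str(list(versions)[0])}.tar.gz"
assert url == "https://cloud.r-project.org/src/contrib/ggplot2_3.5.2.tar.gz"
```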
@@ -5,8 +5,8 @@
 from typing import Optional, Tuple

 import llnl.util.filesystem as fs
-import llnl.util.lang as lang
 import llnl.util.tty as tty
+from llnl.util.lang import ClassProperty, classproperty

 import spack.builder
 import spack.spec

@@ -19,6 +19,12 @@
 from spack.util.executable import Executable, ProcessError


+def _homepage(cls: "RacketPackage") -> Optional[str]:
+    if cls.racket_name:
+        return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
+    return None
+
+
 class RacketPackage(PackageBase):
     """Specialized class for packages that are built using Racket's
     `raco pkg install` and `raco setup` commands.

@@ -37,13 +43,7 @@ class RacketPackage(PackageBase):
     extends("racket", when="build_system=racket")

     racket_name: Optional[str] = None
     parallel = True

-    @lang.classproperty
-    def homepage(cls):
-        if cls.racket_name:
-            return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
-        return None
+    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)


 @spack.builder.builder("racket")
lib/spack/spack/buildcache_migrate.py  (new file, 351 lines)
@@ -0,0 +1,351 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import json
import os
import pathlib
import tempfile
from typing import NamedTuple

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.database as spack_db
import spack.error
import spack.mirrors.mirror
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util

from .enums import InstallRecordStatus
from .url_buildcache import (
    BlobRecord,
    BuildcacheComponent,
    compressed_json_from_dict,
    get_url_buildcache_class,
    sign_file,
    try_verify,
)


def v2_tarball_directory_name(spec):
    """
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


def v2_tarball_name(spec, ext):
    """
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    return f"{spec_formatted}{ext}"


def v2_tarball_path_name(spec, ext):
    """
    Return the full path+name for a given spec according to the convention
    <tarball_directory_name>/<tarball_name>
    """
    return os.path.join(v2_tarball_directory_name(spec), v2_tarball_name(spec, ext))


class MigrateSpecResult(NamedTuple):
    success: bool
    message: str


class MigrationException(spack.error.SpackError):
    """
    Raised when migration fails irrevocably
    """

    def __init__(self, msg):
        super().__init__(msg)


def _migrate_spec(
    s: spack.spec.Spec, mirror_url: str, tmpdir: str, unsigned: bool = False, signing_key: str = ""
) -> MigrateSpecResult:
    """Parallelizable function to migrate a single spec"""
    print_spec = f"{s.name}/{s.dag_hash()[:7]}"

    # Check if the spec file exists in the new location and exit early if so
    v3_cache_class = get_url_buildcache_class(layout_version=3)
    v3_cache_entry = v3_cache_class(mirror_url, s, allow_unsigned=unsigned)
    exists = v3_cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
    v3_cache_entry.destroy()

    if exists:
        msg = f"No need to migrate {print_spec}"
        return MigrateSpecResult(True, msg)

    # Try to fetch the spec metadata
    v2_metadata_urls = [
        url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json.sig"))
    ]

    if unsigned:
        v2_metadata_urls.append(
            url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json"))
        )

    spec_contents = None

    for meta_url in v2_metadata_urls:
        try:
            _, _, meta_file = web_util.read_from_url(meta_url)
            spec_contents = codecs.getreader("utf-8")(meta_file).read()
            v2_spec_url = meta_url
            break
        except (web_util.SpackWebError, OSError):
            pass
    else:
        msg = f"Unable to read metadata for {print_spec}"
        return MigrateSpecResult(False, msg)

    spec_dict = {}

    if unsigned:
        # User asked for unsigned, if we found a signed specfile, just ignore
        # the signature
        if v2_spec_url.endswith(".sig"):
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(spec_contents)
        else:
            spec_dict = json.loads(spec_contents)
    else:
        # User asked for signed, we must successfully verify the signature
        local_signed_pre_verify = os.path.join(
            tmpdir, f"{s.name}_{s.dag_hash()}_verify.spec.json.sig"
        )
        with open(local_signed_pre_verify, "w", encoding="utf-8") as fd:
            fd.write(spec_contents)
        if not try_verify(local_signed_pre_verify):
            return MigrateSpecResult(False, f"Failed to verify signature of {print_spec}")
        with open(local_signed_pre_verify, encoding="utf-8") as fd:
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(fd.read())

    # Read out and remove the bits needed to rename and position the archive
    bcc = spec_dict.pop("binary_cache_checksum", None)
    if not bcc:
        msg = "Cannot migrate a spec that does not have 'binary_cache_checksum'"
        return MigrateSpecResult(False, msg)

    algorithm = bcc["hash_algorithm"]
    checksum = bcc["hash"]

    # TODO: Remove this key once oci buildcache no longer uses it
    spec_dict["buildcache_layout_version"] = 2

    v2_archive_url = url_util.join(mirror_url, "build_cache", v2_tarball_path_name(s, ".spack"))

    # spack's web utilities do not include direct copying of s3 objects, so we
    # need to download the archive locally, and then push it back to the target
    # location
    archive_stage_path = os.path.join(tmpdir, f"archive_stage_{s.name}_{s.dag_hash()}")
    archive_stage = spack.stage.Stage(v2_archive_url, path=archive_stage_path)

    try:
        archive_stage.create()
        archive_stage.fetch()
    except spack.error.FetchError:
        return MigrateSpecResult(False, f"Unable to fetch archive for {print_spec}")

    local_tarfile_path = archive_stage.save_filename

    # As long as we have to download the tarball anyway, we might as well compute the
    # checksum locally and check it against the expected value
    local_checksum = spack.util.crypto.checksum(
        spack.util.crypto.hash_fun_for_algo(algorithm), local_tarfile_path
    )

    if local_checksum != checksum:
        return MigrateSpecResult(
            False, f"Checksum mismatch for {print_spec}: expected {checksum}, got {local_checksum}"
        )

    spec_dict["archive_size"] = os.stat(local_tarfile_path).st_size

    # Compress the spec dict and compute its checksum
    metadata_checksum_algo = "sha256"
    spec_json_path = os.path.join(tmpdir, f"{s.name}_{s.dag_hash()}.spec.json")
    metadata_checksum, metadata_size = compressed_json_from_dict(
        spec_json_path, spec_dict, metadata_checksum_algo
    )

    tarball_blob_record = BlobRecord(
        spec_dict["archive_size"], v3_cache_class.TARBALL_MEDIATYPE, "gzip", algorithm, checksum
    )

    metadata_blob_record = BlobRecord(
        metadata_size,
        v3_cache_class.SPEC_MEDIATYPE,
        "gzip",
        metadata_checksum_algo,
        metadata_checksum,
    )

    # Compute the urls to the new blobs
    v3_archive_url = v3_cache_class.get_blob_url(mirror_url, tarball_blob_record)
    v3_spec_url = v3_cache_class.get_blob_url(mirror_url, metadata_blob_record)

    # First push the tarball
    tty.debug(f"Pushing {local_tarfile_path} to {v3_archive_url}")

    try:
        web_util.push_to_url(local_tarfile_path, v3_archive_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push archive for {print_spec}")

    # Then push the spec file
    tty.debug(f"Pushing {spec_json_path} to {v3_spec_url}")

    try:
        web_util.push_to_url(spec_json_path, v3_spec_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push spec metadata for {print_spec}")

    # Generate the manifest and write it to a temporary location
    manifest = {
        "version": v3_cache_class.get_layout_version(),
        "data": [tarball_blob_record.to_dict(), metadata_blob_record.to_dict()],
    }

    manifest_path = os.path.join(tmpdir, f"{s.dag_hash()}.manifest.json")
    with open(manifest_path, "w", encoding="utf-8") as f:
        json.dump(manifest, f, indent=0, separators=(",", ":"))
        # Note: when using gpg clear sign, we need to avoid long lines (19995
        # chars). If lines are longer, they are truncated without error. So,
        # here we still add newlines, but no indent, to save on file size and
        # line length.

    # Possibly sign the manifest
    if not unsigned:
        manifest_path = sign_file(signing_key, manifest_path)

    v3_manifest_url = v3_cache_class.get_manifest_url(s, mirror_url)

    # Push the manifest
    try:
        web_util.push_to_url(manifest_path, v3_manifest_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push manifest for {print_spec}")

    return MigrateSpecResult(True, f"Successfully migrated {print_spec}")


def migrate(
    mirror: spack.mirrors.mirror.Mirror, unsigned: bool = False, delete_existing: bool = False
) -> None:
    """Perform migration of the given mirror

    If unsigned is True, signatures on signed specs will be ignored, and specs
    will not be re-signed before pushing to the new location. Otherwise, spack
    will attempt to verify signatures and re-sign specs, and will fail if not
    able to do so. If delete_existing is True, spack will delete the original
    contents of the mirror once the migration is complete."""
    signing_key = ""
    if not unsigned:
        try:
            signing_key = bindist.select_signing_key()
        except (bindist.NoKeyException, bindist.PickKeyException):
            raise MigrationException(
                "Signed migration requires exactly one secret key in keychain"
            )

    delete_action = "deleting" if delete_existing else "keeping"
    sign_action = "an unsigned" if unsigned else "a signed"
    mirror_url = mirror.fetch_url

    tty.msg(
        f"Performing {sign_action} migration of {mirror.push_url} "
        f"and {delete_action} existing contents"
    )

    index_url = url_util.join(mirror_url, "build_cache", spack_db.INDEX_JSON_FILE)
    contents = None

    try:
        _, _, index_file = web_util.read_from_url(index_url)
        contents = codecs.getreader("utf-8")(index_file).read()
    except (web_util.SpackWebError, OSError):
        raise MigrationException("Buildcache migration requires a buildcache index")

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        index_path = os.path.join(tmpdir, "_tmp_index.json")
        with open(index_path, "w", encoding="utf-8") as fd:
            fd.write(contents)

        db = bindist.BuildCacheDatabase(tmpdir)
        db._read_from_file(pathlib.Path(index_path))

        specs_to_migrate = [
            s
            for s in db.query_local(installed=InstallRecordStatus.ANY)
            if not s.external and db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]

        # Run the tasks in parallel if possible
        executor = spack.util.parallel.make_concurrent_executor()
        migrate_futures = [
            executor.submit(_migrate_spec, spec, mirror_url, tmpdir, unsigned, signing_key)
            for spec in specs_to_migrate
        ]

        success_count = 0

        tty.msg("Migration summary:")
        for spec, migrate_future in zip(specs_to_migrate, migrate_futures):
            result = migrate_future.result()
            msg = f"    {spec.name}/{spec.dag_hash()[:7]}: {result.message}"
            if result.success:
                success_count += 1
                tty.msg(msg)
            else:
                tty.error(msg)
            # The migrated index should have the same specs as the original index,
            # modulo any specs that we failed to migrate for whatever reason. So
            # to avoid having to re-fetch all the spec files now, just mark them
            # appropriately in the existing database and push that.
            db.mark(spec, "in_buildcache", result.success)

        if success_count > 0:
            tty.msg("Updating index and pushing keys")

            # If the layout.json doesn't yet exist on this mirror, push it
            v3_cache_class = get_url_buildcache_class(layout_version=3)
            v3_cache_class.maybe_push_layout_json(mirror_url)

            # Push the migrated mirror index
            index_tmpdir = os.path.join(tmpdir, "rebuild_index")
            os.mkdir(index_tmpdir)
            bindist._push_index(db, index_tmpdir, mirror_url)

            # Push the public part of the signing key
            if not unsigned:
                keys_tmpdir = os.path.join(tmpdir, "keys")
                os.mkdir(keys_tmpdir)
                bindist._url_push_keys(
                    mirror_url, keys=[signing_key], update_index=True, tmpdir=keys_tmpdir
                )
        else:
            tty.warn("No specs migrated, did you mean to perform an unsigned migration instead?")

        # Delete the old layout if the user requested it
        if delete_existing:
            delete_prefix = url_util.join(mirror_url, "build_cache")
            tty.msg(f"Recursively deleting {delete_prefix}")
            web_util.remove_url(delete_prefix, recursive=True)

    tty.msg("Migration complete")
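For orientation, a hedged sketch of driving the new module programmatically; `migrate` and `MigrationException` come from the file above, while the mirror lookup is ordinary Spack API usage and the mirror name is invented:

```python
# Sketch only, assuming a binary mirror named "my-mirror" is configured.
import spack.mirrors.mirror

from spack.buildcache_migrate import MigrationException, migrate

mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup("my-mirror")

try:
    # Verify-and-resign migration; pass unsigned=True to skip signatures
    migrate(mirror, unsigned=False, delete_existing=False)
except MigrationException as e:
    print(f"migration failed: {e}")
```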
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
 def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
     """Return the builder class if a package module defines it."""
     cls = getattr(pkg.module, name, None)
-    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
+    if cls and spack.repo.is_package_module(cls.__module__):
         return cls
     return None

@@ -121,6 +121,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
             new_cls_name,
             bases,
             {
+                "__module__": package_cls.__module__,
                 "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                 "test_requires_compiler": property(
                     lambda x: x.wrapped_package_object.test_requires_compiler

@@ -129,7 +130,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
                 "tester": property(lambda x: x.wrapped_package_object.tester),
             },
         )
-        new_cls.__module__ = package_cls.__module__
         self.__class__ = new_cls
         self.__dict__.update(wrapped_pkg_object.__dict__)

@@ -185,10 +185,16 @@ def __init__(self, pkg):
         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a
         # catch-all (*args, **kwargs)
-        def setup_build_environment(self, env):
+        def setup_build_environment(
+            self, env: spack.util.environment.EnvironmentModifications
+        ) -> None:
             return self.pkg_with_dispatcher.setup_build_environment(env)

-        def setup_dependent_build_environment(self, env, dependent_spec):
+        def setup_dependent_build_environment(
+            self,
+            env: spack.util.environment.EnvironmentModifications,
+            dependent_spec: spack.spec.Spec,
+        ) -> None:
            return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)

     return Adapter(pkg)

@@ -402,7 +408,7 @@ def fixup_install(self):
         # do something after the package is installed
         pass

-    def setup_build_environment(self, env):
+    def setup_build_environment(self, env: EnvironmentModifications) -> None:
         env.set("MY_ENV_VAR", "my_value")

     class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
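The Adapter hunk moves the `__module__` assignment into the namespace dict passed to `type()`, so the attribute is already in place when the class object is created instead of being patched afterwards. The mechanism in isolation:

```python
# Demonstration only; the names are illustrative, not Spack's.
Base = type("Base", (), {})

# Passing "__module__" in the namespace dict at class-creation time...
cls = type("Adapter", (Base,), {"__module__": "some.package.module"})

# ...is equivalent to assigning it after the fact, but takes effect before
# any metaclass or __init_subclass__ hooks inspect the new class.
assert cls.__module__ == "some.package.module"
```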
@@ -14,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set, Union
+from typing import Callable, Dict, List, Optional, Set, Union
 from urllib.request import Request

 import llnl.path

@@ -24,6 +24,7 @@

 import spack
 import spack.binary_distribution as bindist
+import spack.builder
 import spack.config as cfg
 import spack.environment as ev
 import spack.error

@@ -32,6 +33,7 @@
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.stage
 import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util

@@ -149,10 +151,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     return False


-def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
+def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
     """Determine which packages were added, removed or changed
     between rev1 and rev2, and return the names as a set"""
-    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
+    return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)


 def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):

@@ -244,7 +246,9 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
     if not spec_locations:
         return RebuildDecision(True, "not found anywhere")

-    urls = ",".join([loc["mirror_url"] for loc in spec_locations])
+    urls = ",".join(
+        [f"{loc.url_and_version.url}@v{loc.url_and_version.version}" for loc in spec_locations]
+    )
     message = f"up-to-date [{urls}]"
     return RebuildDecision(False, message)

@@ -613,32 +617,40 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
     job_spec, and attempts to copy the files into the directory given
     by job_log_dir.

-    Args:
+    Parameters:
         job_spec: spec associated with spack install log
         job_log_dir: path into which build log should be copied
     """
     tty.debug(f"job spec: {job_spec}")

-    try:
-        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
-    except spack.error.SpackError as e:
-        tty.error(f"Cannot copy logs: {str(e)}")
+    if not job_spec.concrete:
+        tty.warn("Cannot copy artifacts for non-concrete specs")
         return

-    # Get the package's archived files
-    archive_files = []
-    archive_root = package_metadata_root / "archived-files"
-    if archive_root.is_dir():
-        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
-    else:
-        msg = "Cannot copy package archived files: archived-files must be a directory"
-        tty.warn(msg)
+    package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    if not os.path.isdir(package_metadata_root):
+        # Fallback to using the stage directory
+        job_pkg = job_spec.package
+
+        package_metadata_root = pathlib.Path(job_pkg.stage.path)
+        archive_files = spack.builder.create(job_pkg).archive_files
+        tty.warn("Package not installed, falling back to use stage dir")
+        tty.debug(f"stage dir: {package_metadata_root}")
+    else:
+        # Get the package's archived files
+        archive_files = []
+        archive_root = package_metadata_root / "archived-files"
+        if os.path.isdir(archive_root):
+            archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
+        else:
+            tty.debug(f"No archived files detected at {archive_root}")

     # Try zipped and unzipped versions of the build log
     build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
+    build_log = package_metadata_root / "spack-build-out.txt"
     build_env_mods = package_metadata_root / "spack-build-env.txt"

-    for f in [build_log_zipped, build_env_mods, *archive_files]:
-        copy_files_to_artifacts(str(f), job_log_dir)
+    for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)


 def copy_test_logs_to_artifacts(test_stage, job_test_dir):

@@ -651,11 +663,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     """
     tty.debug(f"test stage: {test_stage}")
     if not os.path.exists(test_stage):
-        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
-        tty.error(msg)
+        tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
         return

-    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
+    copy_files_to_artifacts(
+        os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
+    )


 def download_and_extract_artifacts(url, work_dir) -> str:

@@ -1232,33 +1245,31 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
     """Given a url to write to and the details of the failed job, write an entry
     in the broken specs list.
     """
-    tmpdir = tempfile.mkdtemp()
-    file_path = os.path.join(tmpdir, "broken.txt")
-
-    broken_spec_details = {
-        "broken-spec": {
-            "job-name": pkg_name,
-            "job-stack": stack_name,
-            "job-url": job_url,
-            "pipeline-url": pipeline_url,
-            "concrete-spec-dict": spec_dict,
-        }
-    }
-
-    try:
-        with open(file_path, "w", encoding="utf-8") as fd:
-            syaml.dump(broken_spec_details, fd)
-        web_util.push_to_url(
-            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
-        )
-    except Exception as err:
-        # If there is an S3 error (e.g., access denied or connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = f"Error writing to broken specs list {url}: {err}"
-        tty.warn(msg)
-    finally:
-        shutil.rmtree(tmpdir)
+    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
+        file_path = os.path.join(tmpdir, "broken.txt")
+
+        broken_spec_details = {
+            "broken-spec": {
+                "job-name": pkg_name,
+                "job-stack": stack_name,
+                "job-url": job_url,
+                "pipeline-url": pipeline_url,
+                "concrete-spec-dict": spec_dict,
+            }
+        }
+
+        try:
+            with open(file_path, "w", encoding="utf-8") as fd:
+                syaml.dump(broken_spec_details, fd)
+            web_util.push_to_url(
+                file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
+            )
+        except Exception as err:
+            # If there is an S3 error (e.g., access denied or connection
+            # error), the first non boto-specific class in the exception
+            # hierarchy is Exception. Just print a warning and return
+            msg = f"Error writing to broken specs list {url}: {err}"
+            tty.warn(msg)


 def read_broken_spec(broken_spec_url):

@@ -1294,35 +1305,34 @@ def display_broken_spec_messages(base_url, hashes):
     tty.msg(msg)


-def run_standalone_tests(**kwargs):
+def run_standalone_tests(
+    *,
+    cdash: Optional[CDashHandler] = None,
+    fail_fast: bool = False,
+    log_file: Optional[str] = None,
+    job_spec: Optional[spack.spec.Spec] = None,
+    repro_dir: Optional[str] = None,
+    timeout: Optional[int] = None,
+):
     """Run stand-alone tests on the current spec.

-    Arguments:
-        kwargs (dict): dictionary of arguments used to run the tests
-
-        List of recognized keys:
-
-        * "cdash" (CDashHandler): (optional) cdash handler instance
-        * "fail_fast" (bool): (optional) terminate tests after the first failure
-        * "log_file" (str): (optional) test log file name if NOT CDash reporting
-        * "job_spec" (Spec): spec that was built
-        * "repro_dir" (str): reproduction directory
+    Args:
+        cdash: cdash handler instance
+        fail_fast: terminate tests after the first failure
+        log_file: test log file name if NOT CDash reporting
+        job_spec: spec that was built
+        repro_dir: reproduction directory
+        timeout: maximum time (in seconds) that tests are allowed to run
     """
-    cdash = kwargs.get("cdash")
-    fail_fast = kwargs.get("fail_fast")
-    log_file = kwargs.get("log_file")
-
     if cdash and log_file:
         tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
         log_file = None

     # Error out but do NOT terminate if there are missing required arguments.
-    job_spec = kwargs.get("job_spec")
     if not job_spec:
         tty.error("Job spec is required to run stand-alone tests")
         return

-    repro_dir = kwargs.get("repro_dir")
     if not repro_dir:
         tty.error("Reproduction directory is required for stand-alone tests")
         return

@@ -1331,6 +1341,9 @@ def run_standalone_tests(**kwargs):
     if fail_fast:
         test_args.append("--fail-fast")

+    if timeout is not None:
+        test_args.extend(["--timeout", str(timeout)])
+
     if cdash:
         test_args.extend(cdash.args())
     else:
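The `run_standalone_tests` hunk swaps a `**kwargs` bag for keyword-only parameters, which turns misspelled or unknown options from silent `None`s into immediate `TypeError`s. A minimal before/after comparison; the names here are illustrative, not the Spack API:

```python
from typing import Optional


def run_old(**kwargs):
    fail_fast = kwargs.get("fail_fast")  # typo'd keys fail silently
    timeout = kwargs.get("timeout")
    return fail_fast, timeout


def run_new(*, fail_fast: bool = False, timeout: Optional[int] = None):
    return fail_fast, timeout  # typo'd keys raise TypeError at the call site


assert run_old(fail_fst=True) == (None, None)  # bug slips through unnoticed
try:
    run_new(fail_fst=True)
except TypeError:
    pass  # caught immediately
```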
@@ -2,9 +2,13 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import copy
+import errno
+import glob
+import gzip
 import json
 import os
 import re
+import shutil
 import sys
 import time
 from collections import deque

@@ -25,13 +29,14 @@
 import spack.mirrors.mirror
 import spack.schema
 import spack.spec
+import spack.util.compression as compression
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack import traverse
 from spack.reporters import CDash, CDashConfiguration
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
+from spack.url_buildcache import get_url_buildcache_class

 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

@@ -40,22 +45,67 @@
 _urlopen = web_util.urlopen


-def copy_files_to_artifacts(src, artifacts_dir):
+def copy_gzipped(glob_or_path: str, dest: str) -> None:
+    """Copy all of the files in the source glob/path to the destination.
+
+    Args:
+        glob_or_path: path to file to test
+        dest: destination path to copy to
+    """
+    files = glob.glob(glob_or_path)
+    if not files:
+        raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
+    if len(files) > 1 and not os.path.isdir(dest):
+        raise ValueError(
+            "'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
+        )
+
+    def is_gzipped(path):
+        with open(path, "rb") as fd:
+            return compression.GZipFileType().matches_magic(fd)
+
+    for src in files:
+        if is_gzipped(src):
+            fs.copy(src, dest)
+        else:
+            # Compress and copy in one step
+            src_name = os.path.basename(src)
+            if os.path.isdir(dest):
+                zipped = os.path.join(dest, f"{src_name}.gz")
+            elif not dest.endswith(".gz"):
+                zipped = f"{dest}.gz"
+            else:
+                zipped = dest
+
+            with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
+                shutil.copyfileobj(fin, fout)
+
+
+def copy_files_to_artifacts(
+    src: str, artifacts_dir: str, *, compress_artifacts: bool = False
+) -> None:
     """
     Copy file(s) to the given artifacts directory

-    Parameters:
+    Args:
         src (str): the glob-friendly path expression for the file(s) to copy
         artifacts_dir (str): the destination directory
+        compress_artifacts (bool): option to compress copied artifacts using Gzip
     """
     try:
-        fs.copy(src, artifacts_dir)
+        if compress_artifacts:
+            copy_gzipped(src, artifacts_dir)
+        else:
+            fs.copy(src, artifacts_dir)
     except Exception as err:
-        msg = (
-            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
-            f"exception: {str(err)}"
-        )
-        tty.warn(msg)
+        tty.warn(
+            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
+            f"exception: {str(err)}"
+        )


 def win_quote(quote_str: str) -> str:
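The compress-on-copy branch of `copy_gzipped` boils down to `shutil.copyfileobj` into a `gzip.open` handle. A standard-library-only round trip of that idea, on throwaway paths created just for the demo:

```python
import gzip
import os
import shutil
import tempfile

workdir = tempfile.mkdtemp()
src = os.path.join(workdir, "build.log")
with open(src, "w") as f:
    f.write("ok\n")

dest = os.path.join(workdir, "artifacts")
os.makedirs(dest)

# Equivalent of the "not gzipped yet" branch: compress while copying
zipped = os.path.join(dest, "build.log.gz")
with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
    shutil.copyfileobj(fin, fout)

with gzip.open(zipped, "rt") as f:
    assert f.read() == "ok\n"
```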
@@ -129,33 +179,13 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):

     for release_spec in specs:
         release_spec_dag_hash = release_spec.dag_hash()
-        # TODO: This assumes signed version of the spec
-        buildcache_copies[release_spec_dag_hash] = [
-            {
-                "src": url_util.join(
-                    src_prefix,
-                    bindist.build_cache_relative_path(),
-                    bindist.tarball_name(release_spec, ".spec.json.sig"),
-                ),
-                "dest": url_util.join(
-                    dest_prefix,
-                    bindist.build_cache_relative_path(),
-                    bindist.tarball_name(release_spec, ".spec.json.sig"),
-                ),
-            },
-            {
-                "src": url_util.join(
-                    src_prefix,
-                    bindist.build_cache_relative_path(),
-                    bindist.tarball_path_name(release_spec, ".spack"),
-                ),
-                "dest": url_util.join(
-                    dest_prefix,
-                    bindist.build_cache_relative_path(),
-                    bindist.tarball_path_name(release_spec, ".spack"),
-                ),
-            },
-        ]
+        cache_class = get_url_buildcache_class(
+            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+        )
+        buildcache_copies[release_spec_dag_hash] = {
+            "src": cache_class.get_manifest_url(release_spec, src_prefix),
+            "dest": cache_class.get_manifest_url(release_spec, dest_prefix),
+        }

     target_dir = os.path.dirname(output_file)
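The `write_pipeline_manifest` hunk also changes the shape of each manifest entry: the v2 layout recorded a list of per-file src/dest pairs (signed specfile plus `.spack` archive), while v3 records a single src/dest pair pointing at the spec's manifest, which in turn enumerates the blobs. Roughly, with all URLs invented for illustration:

```python
# v2: one copy instruction per file, two files per spec (invented URLs)
v2_entry = [
    {"src": "s3://src/build_cache/zlib-1.3-abcdefg.spec.json.sig",
     "dest": "s3://dst/build_cache/zlib-1.3-abcdefg.spec.json.sig"},
    {"src": "s3://src/build_cache/zlib-1.3-abcdefg.spack",
     "dest": "s3://dst/build_cache/zlib-1.3-abcdefg.spack"},
]

# v3: a single manifest URL pair per spec; the manifest lists the blobs
v3_entry = {
    "src": "s3://src/manifests/zlib-abcdefg.spec.manifest.json",
    "dest": "s3://dst/manifests/zlib-abcdefg.spec.manifest.json",
}
```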
@@ -292,6 +292,9 @@ def main_script_replacements(cmd):
     )
     maybe_generate_manifest(pipeline, options, manifest_path)

+    relative_specs_url = bindist.buildcache_relative_specs_url()
+    relative_keys_url = bindist.buildcache_relative_keys_url()
+
     if options.pipeline_type == PipelineType.COPY_ONLY:
         stage_names.append("copy")
         sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])

@@ -301,9 +304,12 @@ def main_script_replacements(cmd):
         if "variables" not in sync_job:
             sync_job["variables"] = {}

-        sync_job["variables"][
-            "SPACK_COPY_ONLY_DESTINATION"
-        ] = options.buildcache_destination.fetch_url
+        sync_job["variables"].update(
+            {
+                "SPACK_COPY_ONLY_DESTINATION": options.buildcache_destination.fetch_url,
+                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
+            }
+        )

         pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
         if "buildcache-source" not in pipeline_mirrors:

@@ -333,9 +339,13 @@ def main_script_replacements(cmd):
         signing_job["interruptible"] = True
         if "variables" not in signing_job:
             signing_job["variables"] = {}
-        signing_job["variables"][
-            "SPACK_BUILDCACHE_DESTINATION"
-        ] = options.buildcache_destination.push_url
+        signing_job["variables"].update(
+            {
+                "SPACK_BUILDCACHE_DESTINATION": options.buildcache_destination.push_url,
+                "SPACK_BUILDCACHE_RELATIVE_SPECS_URL": relative_specs_url,
+                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
+            }
+        )
         signing_job["dependencies"] = []

         output_object["sign-pkgs"] = signing_job
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
         all_headers (bool): show headers even when arch/compiler aren't defined
         status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``
-
+        specfile_format (bool): specfile format of the current spec
     """

     def get_arg(name, default=None):

@@ -458,6 +458,7 @@ def get_arg(name, default=None):
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
     status_fn = get_arg("status_fn", None)
+    specfile_format = get_arg("specfile_format", False)

     decorator = get_arg("decorator", None)
     if decorator is None:

@@ -479,6 +480,9 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + vfmt + ffmt

+    if specfile_format:
+        format_string = "[{specfile_version}] " + format_string
+
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import pathlib
 import shutil
 import sys
 import tempfile

@@ -28,7 +29,7 @@

 # Tarball to be downloaded if binary packages are requested in a local mirror
-BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
+BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache-v3.tar.gz"

 #: Subdirectory where to create the mirror
 LOCAL_MIRROR_DIR = "bootstrap_cache"

@@ -410,8 +411,9 @@ def _mirror(args):
     stage.create()
     stage.fetch()
     stage.expand_archive()
-    build_cache_dir = os.path.join(stage.source_path, "build_cache")
-    shutil.move(build_cache_dir, mirror_dir)
+    stage_dir = pathlib.Path(stage.source_path)
+    for entry in stage_dir.iterdir():
+        shutil.move(str(entry), mirror_dir)
     llnl.util.tty.set_msg_enabled(True)

 def write_metadata(subdir, metadata):

@@ -436,7 +438,6 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
-    instructions += "  % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
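The `_mirror` hunk stops assuming a single `build_cache` subdirectory and instead moves every top-level entry of the staged tarball into the mirror, which keeps working as the v3 layout adds sibling directories. The loop in isolation, on throwaway directories created for the demo:

```python
import pathlib
import shutil
import tempfile

stage_dir = pathlib.Path(tempfile.mkdtemp())
mirror_dir = pathlib.Path(tempfile.mkdtemp())
(stage_dir / "blobs").mkdir()      # hypothetical v3 content
(stage_dir / "v3").mkdir()

# Move whatever the tarball contained, without naming its layout
for entry in stage_dir.iterdir():
    shutil.move(str(entry), str(mirror_dir))

assert sorted(p.name for p in mirror_dir.iterdir()) == ["blobs", "v3"]
```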
@@ -4,11 +4,9 @@
 import argparse
 import glob
 import json
 import os
-import shutil
 import sys
 import tempfile
-from typing import List, Tuple
+from typing import List, Optional, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural

@@ -27,14 +25,21 @@
 import spack.stage
 import spack.store
 import spack.util.parallel
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack import traverse
 from spack.cmd import display_specs
 from spack.cmd.common import arguments
 from spack.spec import Spec, save_dependency_specfiles

+from ..buildcache_migrate import migrate
+from ..enums import InstallRecordStatus
+from ..url_buildcache import (
+    BuildcacheComponent,
+    BuildcacheEntryError,
+    URLBuildcacheEntry,
+    check_mirror_for_layout,
+    get_url_buildcache_class,
+)

 description = "create, download and install binary packages"
 section = "packaging"

@@ -76,9 +81,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default=False,
         help="regenerate buildcache index after building package(s)",
     )
-    push.add_argument(
-        "--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
-    )
     push.add_argument(
         "--only",
         default="package,dependencies",

@@ -192,28 +194,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
         default=lambda: spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
-    # Unfortunately there are 3 ways to do the same thing here:
-    check_specs = check.add_mutually_exclusive_group()
-    check_specs.add_argument(
-        "-s", "--spec", help="check single spec instead of release specs file"
-    )
-    check_specs.add_argument(
-        "--spec-file",
-        help="check single spec from json or yaml file instead of release specs file",
-    )
-
     arguments.add_common_arguments(check, ["specs"])

     check.set_defaults(func=check_fn)

     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
-    download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
-    download_spec_or_specfile.add_argument(
-        "-s", "--spec", help="download built tarball for spec from mirror"
-    )
-    download_spec_or_specfile.add_argument(
-        "--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
-    )
+    download.add_argument("-s", "--spec", help="download built tarball for spec from mirror")
     download.add_argument(
         "-p",
         "--path",

@@ -223,28 +211,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
     )
     download.set_defaults(func=download_fn)

-    # Get buildcache name
-    getbuildcachename = subparsers.add_parser(
-        "get-buildcache-name", help=get_buildcache_name_fn.__doc__
-    )
-    getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
-        required=True
-    )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "-s", "--spec", help="spec string for which buildcache name is desired"
-    )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
-    )
-    getbuildcachename.set_defaults(func=get_buildcache_name_fn)
-
     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
     savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
     savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
-    savespecfile_spec_or_specfile.add_argument(
-        "--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
-    )
     savespecfile.add_argument(
         "-s",
         "--specs",

@@ -307,6 +277,27 @@ def setup_parser(subparser: argparse.ArgumentParser):
     )
     update_index.set_defaults(func=update_index_fn)

+    # Migrate a buildcache from layout_version 2 to version 3
+    migrate = subparsers.add_parser("migrate", help=migrate_fn.__doc__)
+    migrate.add_argument("mirror", type=arguments.mirror_name, help="name of a configured mirror")
+    migrate.add_argument(
+        "-u",
+        "--unsigned",
+        default=False,
+        action="store_true",
+        help="Ignore signatures and do not resign, default is False",
+    )
+    migrate.add_argument(
+        "-d",
+        "--delete-existing",
+        default=False,
+        action="store_true",
+        help="Delete the previous layout, the default is to keep it.",
+    )
+    arguments.add_common_arguments(migrate, ["yes_to_all"])
+    # TODO: add -y argument to prompt if user really means to delete existing
+    migrate.set_defaults(func=migrate_fn)
def _matching_specs(specs: List[Spec]) -> List[Spec]:
|
||||
"""Disambiguate specs and return a list of matching specs"""
|
||||
@@ -380,14 +371,8 @@ def _specs_to_be_packaged(
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.spec_file:
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use positional arguments instead."
|
||||
)
|
||||
|
||||
if args.specs or args.spec_file:
|
||||
roots = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
|
||||
if args.specs:
|
||||
roots = _matching_specs(spack.cmd.parse_specs(args.specs))
|
||||
else:
|
||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||
|
||||
@@ -438,6 +423,10 @@ def push_fn(args):
|
||||
(s, PackageNotInstalledError("package not installed")) for s in not_installed
|
||||
)
|
||||
|
||||
# Warn about possible old binary mirror layout
|
||||
if not mirror.push_url.startswith("oci://"):
|
||||
check_mirror_for_layout(mirror)
|
||||
|
||||
with bindist.make_uploader(
|
||||
mirror=mirror,
|
||||
force=args.force,
|
||||
@@ -529,22 +518,7 @@ def check_fn(args: argparse.Namespace):
|
||||
this command uses the process exit code to indicate its result, specifically, if the
|
||||
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
|
||||
"""
|
||||
if args.spec_file:
|
||||
specs_arg = (
|
||||
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
|
||||
)
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
f"Use `spack buildcache check {specs_arg}` instead."
|
||||
)
|
||||
elif args.spec:
|
||||
specs_arg = args.spec
|
||||
tty.warn(
|
||||
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
|
||||
f"Use `spack buildcache check {specs_arg}` instead."
|
||||
)
|
||||
else:
|
||||
specs_arg = args.specs
|
||||
specs_arg = args.specs
|
||||
|
||||
if specs_arg:
|
||||
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
|
||||
@@ -578,28 +552,12 @@ def download_fn(args):
|
||||
code indicates that the command failed to download at least one of the required buildcache
|
||||
components
|
||||
"""
|
||||
if args.spec_file:
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --spec instead."
|
||||
)
|
||||
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec))
|
||||
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to download from a buildcache")
|
||||
|
||||
if not bindist.download_single_spec(specs[0], args.path):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_buildcache_name_fn(args):
|
||||
"""get name (prefix) of buildcache entries for this spec"""
|
||||
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to get buildcache name")
|
||||
print(bindist.tarball_name(specs[0], ""))
|
||||
bindist.download_single_spec(specs[0], args.path)
|
||||
|
||||
|
||||
def save_specfile_fn(args):
|
||||
@@ -609,13 +567,7 @@ def save_specfile_fn(args):
|
||||
successful. if any errors or exceptions are encountered, or if expected command-line arguments
|
||||
are not provided, then the exit code will be non-zero
|
||||
"""
|
||||
if args.root_specfile:
|
||||
tty.warn(
|
||||
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --root-spec instead."
|
||||
)
|
||||
|
||||
specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
|
||||
specs = spack.cmd.parse_specs(args.root_spec)
|
||||
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to save specfile")
|
||||
@@ -630,29 +582,78 @@ def save_specfile_fn(args):
|
||||
)
|
||||
|
||||
|
||||
def copy_buildcache_file(src_url, dest_url, local_path=None):
|
||||
"""Copy from source url to destination url"""
|
||||
tmpdir = None
|
||||
def copy_buildcache_entry(cache_entry: URLBuildcacheEntry, destination_url: str):
|
||||
"""Download buildcache entry and copy it to the destination_url"""
|
||||
try:
|
||||
spec_dict = cache_entry.fetch_metadata()
|
||||
cache_entry.fetch_archive()
|
||||
except bindist.BuildcacheEntryError as e:
|
||||
tty.warn(f"Failed to retrieve buildcache for copying due to {e}")
|
||||
cache_entry.destroy()
|
||||
return
|
||||
|
||||
if not local_path:
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
local_path = os.path.join(tmpdir, os.path.basename(src_url))
|
||||
spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
|
||||
local_spec_path = cache_entry.get_local_spec_path()
|
||||
tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)
|
||||
local_tarball_path = cache_entry.get_local_archive_path()
|
||||
|
||||
target_spec = spack.spec.Spec.from_dict(spec_dict)
|
||||
spec_label = f"{target_spec.name}/{target_spec.dag_hash()[:7]}"
|
||||
|
||||
if not tarball_blob_record:
|
||||
cache_entry.destroy()
|
||||
raise BuildcacheEntryError(f"No source tarball blob record, failed to sync {spec_label}")
|
||||
|
||||
# Try to push the tarball
|
||||
tarball_dest_url = cache_entry.get_blob_url(destination_url, tarball_blob_record)
|
||||
|
||||
try:
|
||||
temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
|
||||
try:
|
||||
temp_stage.create()
|
||||
temp_stage.fetch()
|
||||
web_util.push_to_url(local_path, dest_url, keep_original=True)
|
||||
except spack.error.FetchError as e:
|
||||
# Expected, since we have to try all the possible extensions
|
||||
tty.debug("no such file: {0}".format(src_url))
|
||||
tty.debug(e)
|
||||
finally:
|
||||
temp_stage.destroy()
|
||||
finally:
|
||||
if tmpdir and os.path.exists(tmpdir):
|
||||
shutil.rmtree(tmpdir)
|
||||
web_util.push_to_url(local_tarball_path, tarball_dest_url, keep_original=True)
|
||||
except Exception as e:
|
||||
tty.warn(f"Failed to push {local_tarball_path} to {tarball_dest_url} due to {e}")
|
||||
cache_entry.destroy()
|
||||
return
|
||||
|
||||
if not spec_blob_record:
|
||||
cache_entry.destroy()
|
||||
raise BuildcacheEntryError(f"No source spec blob record, failed to sync {spec_label}")
|
||||
|
||||
# Try to push the spec file
|
||||
spec_dest_url = cache_entry.get_blob_url(destination_url, spec_blob_record)
|
||||
|
||||
try:
|
||||
web_util.push_to_url(local_spec_path, spec_dest_url, keep_original=True)
|
||||
except Exception as e:
|
||||
tty.warn(f"Failed to push {local_spec_path} to {spec_dest_url} due to {e}")
|
||||
cache_entry.destroy()
|
||||
return
|
||||
|
||||
# Stage the manifest locally, since if it's signed, we don't want to try to
|
||||
# to reproduce that here. Instead just push the locally staged manifest to
|
||||
# the expected path at the destination url.
|
||||
manifest_src_url = cache_entry.remote_manifest_url
|
||||
manifest_dest_url = cache_entry.get_manifest_url(target_spec, destination_url)
|
||||
|
||||
manifest_stage = spack.stage.Stage(manifest_src_url)
|
||||
|
||||
try:
|
||||
manifest_stage.create()
|
||||
manifest_stage.fetch()
|
||||
except Exception as e:
|
||||
tty.warn(f"Failed to fetch manifest from {manifest_src_url} due to {e}")
|
||||
manifest_stage.destroy()
|
||||
cache_entry.destroy()
|
||||
return
|
||||
|
||||
local_manifest_path = manifest_stage.save_filename
|
||||
|
||||
try:
|
||||
web_util.push_to_url(local_manifest_path, manifest_dest_url, keep_original=True)
|
||||
except Exception as e:
|
||||
tty.warn(f"Failed to push manifest to {manifest_dest_url} due to {e}")
|
||||
|
||||
manifest_stage.destroy()
|
||||
cache_entry.destroy()
|
||||
|
||||
|
||||
def sync_fn(args):
@@ -692,37 +693,21 @@ def sync_fn(args):
)
)

build_cache_dir = bindist.build_cache_relative_path()
buildcache_rel_paths = []

tty.debug("Syncing the following specs:")
for s in env.all_specs():
specs_to_sync = [s for s in env.all_specs() if not s.external]
for s in specs_to_sync:
tty.debug("  {0}{1}: {2}".format("* " if s in env.roots() else "  ", s.name, s.dag_hash()))

buildcache_rel_paths.extend(
[
os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
]
cache_class = get_url_buildcache_class(
layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
)

tmpdir = tempfile.mkdtemp()

try:
for rel_path in buildcache_rel_paths:
src_url = url_util.join(src_mirror_url, rel_path)
local_path = os.path.join(tmpdir, rel_path)
dest_url = url_util.join(dest_mirror_url, rel_path)

tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
copy_buildcache_file(src_url, dest_url, local_path=local_path)
finally:
shutil.rmtree(tmpdir)
src_cache_entry = cache_class(src_mirror_url, s, allow_unsigned=True)
src_cache_entry.read_manifest()
copy_buildcache_entry(src_cache_entry, dest_mirror_url)


def manifest_copy(manifest_file_list, dest_mirror=None):
def manifest_copy(
manifest_file_list: List[str], dest_mirror: Optional[spack.mirrors.mirror.Mirror] = None
):
"""Read manifest files containing information about specific specs to copy
from source to destination, remove duplicates since any binary package for
a given hash should be the same as any other, and copy all files specified
@@ -732,21 +717,24 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
for manifest_path in manifest_file_list:
with open(manifest_path, encoding="utf-8") as fd:
manifest = json.loads(fd.read())
for spec_hash, copy_list in manifest.items():
for spec_hash, copy_obj in manifest.items():
# Last duplicate hash wins
deduped_manifest[spec_hash] = copy_list
deduped_manifest[spec_hash] = copy_obj

build_cache_dir = bindist.build_cache_relative_path()
for spec_hash, copy_list in deduped_manifest.items():
for copy_file in copy_list:
dest = copy_file["dest"]
if dest_mirror:
src_relative_path = os.path.join(
build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
)
dest = url_util.join(dest_mirror.push_url, src_relative_path)
tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
copy_buildcache_file(copy_file["src"], dest)
for spec_hash, copy_obj in deduped_manifest.items():
cache_class = get_url_buildcache_class(
layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
)
src_cache_entry = cache_class(
cache_class.get_base_url(copy_obj["src"]), allow_unsigned=True
)
src_cache_entry.read_manifest(manifest_url=copy_obj["src"])
if dest_mirror:
destination_url = dest_mirror.push_url
else:
destination_url = cache_class.get_base_url(copy_obj["dest"])
tty.debug("copying {0} to {1}".format(copy_obj["src"], destination_url))
copy_buildcache_entry(src_cache_entry, destination_url)

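As an aside, the "last duplicate hash wins" policy above means later manifest files silently override earlier entries for the same spec hash. A minimal, self-contained sketch of that deduplication (the hashes and URLs are toy values, not taken from the diff):

    manifests = [
        {"abc123": {"src": "s3://old/manifest.json", "dest": "s3://dst/manifest.json"}},
        {"abc123": {"src": "s3://new/manifest.json", "dest": "s3://dst/manifest.json"}},
    ]
    deduped_manifest = {}
    for manifest in manifests:
        for spec_hash, copy_obj in manifest.items():
            deduped_manifest[spec_hash] = copy_obj  # last duplicate hash wins
    assert deduped_manifest["abc123"]["src"] == "s3://new/manifest.json"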
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
@@ -770,13 +758,9 @@ def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
bindist._url_generate_package_index(url, tmpdir)

if update_keys:
keys_url = url_util.join(
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)

try:
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist.generate_key_index(keys_url, tmpdir)
bindist.generate_key_index(url, tmpdir)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.
@@ -788,5 +772,53 @@ def update_index_fn(args):
return update_index(args.mirror, update_keys=args.keys)

def migrate_fn(args):
"""perform in-place binary mirror migration (2 to 3)

A mirror can contain both layout version 2 and version 3 simultaneously without
interference. This command performs in-place migration of a binary mirror laid
out according to version 2, to a binary mirror laid out according to layout
version 3. Only indexed specs will be migrated, so consider updating the mirror
index before running this command. Re-run the command to migrate any missing
items.

The default mode of operation is to perform a signed migration, that is, spack
will attempt to verify the signatures on specs, and then re-sign them before
migration, using whatever keys are already installed in your key ring. You can
migrate a mirror of unsigned binaries (or convert a mirror of signed binaries
to unsigned) by providing the --unsigned argument.

By default spack will leave the original mirror contents (in the old layout) in
place after migration. You can have spack remove the old contents by providing
the --delete-existing argument. Because migrating a mostly-already-migrated
mirror should be fast, consider a workflow where you perform a default migration
(i.e. preserve the existing layout rather than deleting it), then evaluate the
state of the migrated mirror by attempting to install from it, and finally
run the migration again with --delete-existing."""
target_mirror = args.mirror
unsigned = args.unsigned
assert isinstance(target_mirror, spack.mirrors.mirror.Mirror)
delete_existing = args.delete_existing

proceed = True
if delete_existing and not args.yes_to_all:
msg = (
"Using --delete-existing will delete the entire contents \n"
"    of the old layout within the mirror. Because migrating a mirror \n"
"    that has already been migrated should be fast, consider a workflow \n"
"    where you perform a default migration (i.e. preserve the existing \n"
"    layout rather than deleting it), then evaluate the state of the \n"
"    migrated mirror by attempting to install from it, and finally, \n"
"    run the migration again with --delete-existing."
)
tty.warn(msg)
proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)

if not proceed:
tty.die("Migration aborted.")

migrate(target_mirror, unsigned=unsigned, delete_existing=delete_existing)

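A minimal sketch of the two-pass workflow the docstring describes, driving migrate_fn directly with a namespace in place of parsed CLI arguments. The mirror URL is hypothetical, and constructing a Mirror from a bare URL string is an assumption about its signature:

    import argparse

    import spack.mirrors.mirror

    args = argparse.Namespace(
        mirror=spack.mirrors.mirror.Mirror("s3://my-bucket/my-mirror"),  # hypothetical mirror
        unsigned=False,
        delete_existing=False,  # first pass: keep the old layout in place
        yes_to_all=True,
    )
    migrate_fn(args)
    # ... verify installs from the migrated mirror, then clean up the old layout:
    args.delete_existing = True
    migrate_fn(args)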
def buildcache(parser, args):
return args.func(args)

@@ -4,7 +4,6 @@

import json
import os
import re
import shutil
import sys
from typing import Dict
@@ -26,12 +25,10 @@
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@@ -45,7 +42,6 @@
SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")


def deindent(desc):
@@ -164,6 +160,12 @@ def setup_parser(subparser):
default=False,
help="stop stand-alone tests after the first failure",
)
rebuild.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)
rebuild.set_defaults(func=ci_rebuild)
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])

@@ -421,7 +423,7 @@ def ci_rebuild(args):
# jobs in subsequent stages.
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
tty.msg("    {0}".format(match["mirror_url"]))
tty.msg("    {0}".format(match.url_and_version.url))

# Now we are done and successful
return 0
@@ -451,7 +453,7 @@ def ci_rebuild(args):

# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--only=package"]
root_install_args = install_args + ["--keep-stage", "--only=package"]

if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
@@ -491,6 +493,9 @@ def ci_rebuild(args):
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Clear the stage directory
spack.stage.purge()

# If the installation succeeded and we're running stand-alone tests for
# the package, run them and copy the output. Failures of any kind should
# *not* terminate the build process or preclude creating the build cache.
@@ -525,6 +530,7 @@ def ci_rebuild(args):
fail_fast=args.fail_fast,
log_file=log_file,
repro_dir=repro_dir,
timeout=args.timeout,
)

except Exception as err:
@@ -783,18 +789,17 @@ def ci_verify_versions(args):
then parses the git diff between the two to determine which packages
have been modified, and verifies the new checksums inside of them.
"""
with fs.working_dir(spack.paths.prefix):
# We use HEAD^1 explicitly on the merge commit created by
# GitHub Actions. However HEAD~1 is a safer default for the helper function.
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)

# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
# Get a list of all packages that have been changed or added
# between from_ref and to_ref
pkgs = spack.repo.get_all_package_diffs(
"AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
)

failed_version = False
for pkg_name, path in pkgs:
for pkg_name in pkgs:
spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
path = spack.repo.PATH.package_path(pkg_name)

# Skip checking manual download packages and trust the maintainers
if pkg.manual_download:
@@ -818,7 +823,7 @@ def ci_verify_versions(args):
# TODO: enforce that every version has a commit or a sha256 defined if not
# an infinite version (there are a lot of packages where this doesn't work yet.)

with fs.working_dir(spack.paths.prefix):
with fs.working_dir(os.path.dirname(path)):
added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
@@ -63,7 +63,7 @@ def setup_parser(subparser):
)

# List
list_parser = sp.add_parser("list", help="list available compilers")
list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
list_parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read from"
)
@@ -216,5 +216,6 @@ def compiler(parser, args):
"rm": compiler_remove,
"info": compiler_info,
"list": compiler_list,
"ls": compiler_list,
}
action[args.compiler_command](args)
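The dispatch table needs both "list" and "ls" keys because argparse records the subcommand name exactly as the user typed it, alias included. A small standalone sketch of that behavior (the parser and handler names are illustrative):

    import argparse

    def do_list(args):
        print("listing")

    parser = argparse.ArgumentParser()
    sp = parser.add_subparsers(dest="command")
    sp.add_parser("list", aliases=["ls"])

    action = {"list": do_list, "ls": do_list}
    args = parser.parse_args(["ls"])
    action[args.command](args)  # args.command == "ls", not "list"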
@@ -23,7 +23,7 @@
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
from spack.util.naming import pkg_name_to_class_name, simplify_name

description = "create a new package file"
section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:

def __init__(self, name: str, versions, languages: List[str]):
self.name = name
self.class_name = mod_to_class(name)
self.class_name = pkg_name_to_class_name(name)
self.versions = versions
self.languages = languages

@@ -572,7 +572,7 @@ def edit(self, spec, prefix):
class IntelPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for licensed Intel software"""

base_class_name = "IntelPackage"
base_class_name = "IntelOneApiPackage"

body_def = """\
# FIXME: Override `setup_environment` if necessary."""
@@ -874,7 +874,7 @@ def get_name(name, url):

result = simplify_name(result)

if not valid_fully_qualified_module_name(result):
if not re.match(r"^[a-z0-9-]+$", result):
tty.die("Package name can only contain a-z, 0-9, and '-'")

return result

@@ -102,7 +102,7 @@ def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Sp
)
else:
# look up the maximum version so infinity versions are preferred for develop
version = max(spec.package_class.versions.keys())
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])

@@ -62,7 +62,7 @@ def setup_parser(subparser):
"package Spack knows how to find."
)

sp.add_parser("list", help="list detectable packages, by repository and name")
sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")

read_cray_manifest = sp.add_parser(
"read-cray-manifest",
@@ -259,6 +259,7 @@ def external(parser, args):
action = {
"find": external_find,
"list": external_list,
"ls": external_list,
"read-cray-manifest": external_read_cray_manifest,
}
action[args.external_command](args)
@@ -51,6 +51,12 @@ def setup_parser(subparser):
"-I", "--install-status", action="store_true", help="show install status of packages"
)

subparser.add_argument(
"--specfile-format",
action="store_true",
help="show the specfile format for installed deps",
)

subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
)
@@ -280,6 +286,7 @@ def root_decorator(spec, string):
show_flags=True,
decorator=root_decorator,
variants=True,
specfile_format=args.specfile_format,
)

print()
@@ -301,6 +308,7 @@ def root_decorator(spec, string):
namespace=True,
show_flags=True,
variants=True,
specfile_format=args.specfile_format,
)
print()

@@ -390,7 +398,12 @@ def find(parser, args):
if args.show_concretized:
display_results += concretized_but_not_installed
cmd.display_specs(
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
display_results,
args,
decorator=decorator,
all_headers=True,
status_fn=status_fn,
specfile_format=args.specfile_format,
)

# print number of installed packages last (as the list may be long)
@@ -10,11 +10,13 @@
import re
import sys
from html import escape
from typing import Type

import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.deptypes as dt
import spack.package_base
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList
@@ -139,10 +141,10 @@ def name_only(pkgs, out):
tty.msg("%d packages" % len(pkgs))


def github_url(pkg):
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
"""Link to a package file on github."""
url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
return url.format(pkg.name)
mod_path = pkg.__module__.replace(".", "/")
return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"

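The new github_url derives the link from the package's Python module path rather than hard-coding the builtin-repo layout. A sketch of the mapping, using a hypothetical module name (the exact namespace a package class reports depends on the repo it was loaded from):

    mod = "spack_repo.builtin.packages.zlib.package"  # hypothetical __module__
    mod_path = mod.replace(".", "/")
    url = f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"
    # -> .../var/spack/spack_repo/builtin/packages/zlib/package.py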

def rows_for_ncols(elts, ncols):
@@ -89,17 +89,17 @@ def setup_parser(subparser):

def pkg_add(args):
"""add a package to the git stage with `git add`"""
spack.repo.add_package_to_git_stage(args.packages)
spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())


def pkg_list(args):
"""list packages associated with a particular spack git revision"""
colify(spack.repo.list_packages(args.rev))
colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))


def pkg_diff(args):
"""compare packages available in two different git revisions"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())

if u1:
print("%s:" % args.rev1)
@@ -114,21 +114,23 @@ def pkg_diff(args):

def pkg_removed(args):
"""show packages removed since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
if u1:
colify(sorted(u1))


def pkg_added(args):
"""show packages added since a commit"""
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
if u2:
colify(sorted(u2))


def pkg_changed(args):
"""show packages changed since a commit"""
packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
packages = spack.repo.get_all_package_diffs(
args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
)

if packages:
colify(sorted(packages))
@@ -4,6 +4,7 @@

import os
import sys
from typing import List

import llnl.util.tty as tty

@@ -24,9 +25,7 @@ def setup_parser(subparser):
create_parser = sp.add_parser("create", help=repo_create.__doc__)
create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument(
"namespace",
help="namespace to identify packages in the repository (defaults to the directory name)",
nargs="?",
"namespace", help="name or namespace to identify packages in the repository"
)
create_parser.add_argument(
"-d",
@@ -138,7 +137,7 @@ def repo_remove(args):
def repo_list(args):
"""show registered repositories and their namespaces"""
roots = spack.config.get("repos", scope=args.scope)
repos = []
repos: List[spack.repo.Repo] = []
for r in roots:
try:
repos.append(spack.repo.from_path(r))
@@ -146,17 +145,14 @@ def repo_list(args):
continue

if sys.stdout.isatty():
msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))

if not repos:
return

max_ns_len = max(len(r.namespace) for r in repos)
for repo in repos:
fmt = "%%-%ds%%s" % (max_ns_len + 4)
print(fmt % (repo.namespace, repo.root))
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")


def repo(parser, args):
@@ -136,20 +136,7 @@ def solve(parser, args):
setup_only = set(show) == {"asp"}
unify = spack.config.get("concretizer:unify")
allow_deprecated = spack.config.get("config:deprecated", False)
if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
if unify == "when_possible":
for idx, result in enumerate(
solver.solve_in_rounds(
specs,
@@ -166,3 +153,29 @@ def solve(parser, args):
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)
elif unify:
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for spec in specs:
tty.msg("SOLVING SPEC:", spec)
result = solver.solve(
[spec],
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
@@ -56,10 +56,10 @@ def is_package(f):
"""Whether flake8 should consider a file as a core file or a package.

We run flake8 with different exceptions for the core and for
packages, since we allow `from spack import *` and poking globals
packages, since we allow `from spack.package import *` and poking globals
into packages.
"""
return f.startswith("var/spack/repos/") and f.endswith("package.py")
return f.startswith("var/spack/") and f.endswith("package.py")


#: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
def _module_part(root: str, expr: str):
parts = expr.split(".")
# spack.pkg is for repositories, don't try to resolve it here.
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
return None
while parts:
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
@@ -65,6 +65,12 @@ def setup_parser(subparser):
run_parser.add_argument(
"--help-cdash", action="store_true", help="show usage instructions for CDash reporting"
)
run_parser.add_argument(
"--timeout",
type=int,
default=None,
help="maximum time (in seconds) that tests are allowed to run",
)

cd_group = run_parser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ["clean", "dirty"])
@@ -176,7 +182,7 @@ def test_run(args):
for spec in specs:
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
tty.warn(f"No {explicit_str}installed packages match spec {spec}")

# TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process
@@ -192,7 +198,7 @@ def test_run(args):
# test_stage_dir
test_suite = spack.install_test.TestSuite(specs_to_test, args.alias)
test_suite.ensure_stage()
tty.msg("Spack test %s" % test_suite.name)
tty.msg(f"Spack test {test_suite.name}")

# Set up reporter
setattr(args, "package", [s.format() for s in test_suite.specs])
@@ -204,6 +210,7 @@ def test_run(args):
dirty=args.dirty,
fail_first=args.fail_first,
externals=args.externals,
timeout=args.timeout,
)


@@ -18,6 +18,10 @@ class Languages(enum.Enum):


class CompilerAdaptor:
"""Provides access to compiler attributes via `Package.compiler`. Useful for
packages which do not yet access compiler properties via `self.spec[language]`.
"""

def __init__(
self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
) -> None:
@@ -79,6 +83,14 @@ def implicit_rpaths(self) -> List[str]:
result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
return result

@property
def opt_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.opt_flags

@property
def debug_flags(self) -> List[str]:
return next(iter(self.compilers.values())).package.debug_flags

@property
def openmp_flag(self) -> str:
return next(iter(self.compilers.values())).package.openmp_flag
@@ -140,7 +152,7 @@ def c17_flag(self) -> str:
@property
def c23_flag(self) -> str:
return self.compilers[Languages.C].package.standard_flag(
language=Languages.C.value, standard="17"
language=Languages.C.value, standard="23"
)

@property
@@ -190,6 +202,10 @@ def f77(self):
self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
return self.compilers[Languages.FORTRAN].package.fortran

@property
def stdcxx_libs(self):
return self._maybe_return_attribute("stdcxx_libs", lang=Languages.CXX)


class DeprecatedCompiler(lang.DeprecatedProperty):
def __init__(self) -> None:
@@ -7,6 +7,7 @@
import os
import re
import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple

import archspec.cpu
@@ -337,7 +338,15 @@ def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List[spack.spec.Spec]:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
try:
detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
except Exception:
warnings.warn(
f"[{__name__}] cannot detect {pkg_name} from the "
f"following paths: {', '.join(filtered_paths)}"
)
continue

for s in detected:
for key in ("flags", "environment", "extra_rpaths"):
if key in compiler_dict:
@@ -149,12 +149,12 @@ def _getfqdn():
return socket.getfqdn()


def reader(version: vn.ConcreteVersion) -> Type["spack.spec.SpecfileReaderBase"]:
def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = {
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
vn.Version("7"): spack.spec.SpecfileV4,
vn.Version("8"): spack.spec.SpecfileV5,
vn.StandardVersion.from_string("5"): spack.spec.SpecfileV1,
vn.StandardVersion.from_string("6"): spack.spec.SpecfileV3,
vn.StandardVersion.from_string("7"): spack.spec.SpecfileV4,
vn.StandardVersion.from_string("8"): spack.spec.SpecfileV5,
}
return reader_cls[version]

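Since the mapping is now keyed on StandardVersion instances rather than Version() results, callers must look classes up with the same type. A usage sketch under that assumption:

    import spack.version as vn

    # a database version string parsed as in the hunk below
    db_version = vn.StandardVersion.from_string("8")
    reader_cls = reader(db_version)  # -> spack.spec.SpecfileV5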
@@ -824,7 +824,7 @@ def check(cond, msg):
db = fdata["database"]
check("version" in db, "no 'version' in JSON DB.")

self.db_version = vn.Version(db["version"])
self.db_version = vn.StandardVersion.from_string(db["version"])
if self.db_version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, self.db_version)
elif self.db_version < _DB_VERSION:
@@ -20,7 +20,7 @@
import sys
from typing import Dict, List, Optional, Set, Tuple, Union

import llnl.util.tty
from llnl.util import tty

import spack.config
import spack.error
@@ -93,14 +93,13 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details
msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
tty.warn(f"Constructed spec for {spec.name} does not have a string representation")
return False

try:
spack.spec.Spec(str(spec))
except spack.error.SpackError:
llnl.util.tty.warn(
tty.warn(
"Constructed spec has a string representation but the string"
" representation does not evaluate to a valid spec: {0}".format(str(spec))
)
@@ -109,20 +108,24 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True


def path_to_dict(search_paths: List[str]):
def path_to_dict(search_paths: List[str]) -> Dict[str, str]:
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib = {}
path_to_lib: Dict[str, str] = {}
# Reverse order of search directories so that a lib in the first
# entry overrides later entries
for search_path in reversed(search_paths):
try:
with os.scandir(search_path) as entries:
path_to_lib.update(
{entry.path: entry.name for entry in entries if entry.is_file()}
)
dir_iter = os.scandir(search_path)
except OSError as e:
msg = f"cannot scan '{search_path}' for external software: {str(e)}"
llnl.util.tty.debug(msg)
tty.debug(f"cannot scan '{search_path}' for external software: {e}")
continue
with dir_iter as entries:
for entry in entries:
try:
if entry.is_file():
path_to_lib[entry.path] = entry.name
except OSError as e:
tty.debug(f"cannot scan '{search_path}' for external software: {e}")

return path_to_lib


@@ -34,11 +34,13 @@ class OpenMpi(Package):
import collections.abc
import os
import re
import warnings
from typing import Any, Callable, List, Optional, Tuple, Type, Union

import llnl.util.tty.color

import spack.deptypes as dt
import spack.error
import spack.fetch_strategy
import spack.package_base
import spack.patch
@@ -608,7 +610,7 @@ def _execute_patch(
return _execute_patch


def conditional(*values: List[Any], when: Optional[WhenType] = None):
def conditional(*values: Union[str, bool], when: Optional[WhenType] = None):
"""Conditional values that can be used in variant declarations."""
# _make_when_spec returns None when the condition is statically false.
when = _make_when_spec(when)
@@ -620,7 +622,7 @@ def conditional(*values: List[Any], when: Optional[WhenType] = None):
@directive("variants")
def variant(
name: str,
default: Optional[Any] = None,
default: Optional[Union[bool, str, Tuple[str, ...]]] = None,
description: str = "",
values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
multi: Optional[bool] = None,
@@ -650,11 +652,29 @@ def variant(
DirectiveError: If arguments passed to the directive are invalid
"""

# This validation can be removed at runtime and enforced with an audit in Spack v1.0.
# For now it's a warning to let people migrate faster.
if not (
default is None
or type(default) in (bool, str)
or (type(default) is tuple and all(type(x) is str for x in default))
):
if isinstance(default, (list, tuple)):
did_you_mean = f"default={','.join(str(x) for x in default)!r}"
else:
did_you_mean = f"default={str(default)!r}"
warnings.warn(
f"default value for variant '{name}' is not a boolean or string: default={default!r}. "
f"Did you mean {did_you_mean}?",
stacklevel=3,
category=spack.error.SpackAPIWarning,
)

def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))

if name in spack.variant.reserved_names:
if name in spack.variant.RESERVED_NAMES:

def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
@@ -665,7 +685,11 @@ def _raise_reserved_name(pkg):
# Ensure we have a sequence of allowed variant values, or a
# predicate for it.
if values is None:
if str(default).upper() in ("TRUE", "FALSE"):
if (
default in (True, False)
or type(default) is str
and default.upper() in ("TRUE", "FALSE")
):
values = (True, False)
else:
values = lambda x: True
@@ -698,12 +722,15 @@ def _raise_argument_error(pkg):
# or the empty string, as the former indicates that a default
# was not set while the latter will make the variant unparsable
# from the command line
if isinstance(default, tuple):
default = ",".join(default)

if default is None or default == "":

def _raise_default_not_set(pkg):
if default is None:
msg = "either a default was not explicitly set, " "or 'None' was used"
elif default == "":
msg = "either a default was not explicitly set, or 'None' was used"
else:
msg = "the default cannot be an empty string"
raise DirectiveError(format_error(msg, pkg))

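A standalone restatement of the default-value check introduced in the variant hunk above, runnable outside Spack (the function name is illustrative, and a plain UserWarning stands in for SpackAPIWarning):

    import warnings

    def check_variant_default(name, default):
        # tuples of strings are exempt; lists and other sequences are not
        if not (
            default is None
            or type(default) in (bool, str)
            or (type(default) is tuple and all(type(x) is str for x in default))
        ):
            if isinstance(default, (list, tuple)):
                did_you_mean = f"default={','.join(str(x) for x in default)!r}"
            else:
                did_you_mean = f"default={str(default)!r}"
            warnings.warn(
                f"default value for variant '{name}' is not a boolean or string: "
                f"default={default!r}. Did you mean {did_you_mean}?"
            )

    check_variant_default("level", ["debug", "release"])  # suggests default='debug,release'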
@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.

if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
if spack.repo.is_package_module(cls.__module__):
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
@@ -144,7 +144,6 @@ class Foo(Package):
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names

if isinstance(dicts, str):
dicts = (dicts,)
@@ -566,7 +566,7 @@
display_specs,
environment_dir_from_name,
environment_from_name_or_dir,
environment_path_scopes,
environment_path_scope,
exists,
initialize_environment_dir,
installed_specs,
@@ -603,7 +603,7 @@
"display_specs",
"environment_dir_from_name",
"environment_from_name_or_dir",
"environment_path_scopes",
"environment_path_scope",
"exists",
"initialize_environment_dir",
"installed_specs",
@@ -31,7 +31,6 @@
import spack.repo
import spack.schema.env
import spack.spec
import spack.spec_list
import spack.store
import spack.user_environment as uenv
import spack.util.environment
@@ -44,10 +43,10 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables

from ..enums import ConfigScopePriority
from .list import SpecList, SpecListError, SpecListParser

SpecPair = spack.concretize.SpecPair

@@ -97,16 +96,15 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
return path_str


def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
for scope in scopes:
config = scope.get_section("config")
if config and "environments_root" in config["config"]:
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
def ensure_no_disallowed_env_config_mods(scope: spack.config.ConfigScope) -> None:
config = scope.get_section("config")
if config and "environments_root" in config["config"]:
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)


def default_manifest_yaml():
@@ -933,8 +931,10 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self.new_specs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {}

#: Parser for spec lists
self._spec_lists_parser = SpecListParser()
#: Specs from "spack.yaml"
self.spec_lists: Dict[str, SpecList] = {user_speclist_name: SpecList()}
self.spec_lists: Dict[str, SpecList] = {}
#: User specs from the last concretization
self.concretized_user_specs: List[Spec] = []
#: Roots associated with the last concretization, in order
@@ -1002,26 +1002,6 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)

def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True
assert len(entry) == 1

if when:
for name, spec_list in entry.items():
if name == "when":
continue
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs

def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.

@@ -1069,7 +1049,11 @@ def add_view(name, values):

def _process_concrete_includes(self):
"""Extract and load into memory included concrete spec data."""
self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
_included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
# Expand config and environment variables
self.included_concrete_envs = [
spack.util.path.canonicalize_path(_env) for _env in _included_concrete_envs
]

if self.included_concrete_envs:
if os.path.exists(self.lock_path):
@@ -1083,21 +1067,24 @@ def _process_concrete_includes(self):

def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
self.views = {}
self._sync_speclists()
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()

for item in spack.config.get("definitions", []):
self._process_definition(item)
def _sync_speclists(self):
self.spec_lists = {}
self.spec_lists.update(
self._spec_lists_parser.parse_definitions(
data=spack.config.CONFIG.get("definitions", [])
)
)

env_configuration = self.manifest[TOP_LEVEL_KEY]
spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
self.spec_lists[user_speclist_name] = self._spec_lists_parser.parse_user_specs(
name=user_speclist_name, yaml_list=spec_list
)
self.spec_lists[user_speclist_name] = user_specs

self._process_view(spack.config.get("view", True))
self._process_concrete_includes()

def all_concretized_user_specs(self) -> List[Spec]:
"""Returns all of the concretized user specs of the environment and
|
||||
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
|
||||
and needs to be maintained when re-reading an existing environment.
|
||||
"""
|
||||
self.spec_lists = collections.OrderedDict()
|
||||
self.spec_lists[user_speclist_name] = SpecList()
|
||||
|
||||
self.spec_lists = {}
|
||||
self._dev_specs = {}
|
||||
self.concretized_order = [] # roots of last concretize, in order
|
||||
self.concretized_user_specs = [] # user specs from last concretize
|
||||
@@ -1277,22 +1262,6 @@ def destroy(self):
|
||||
"""Remove this environment from Spack entirely."""
|
||||
shutil.rmtree(self.path)
|
||||
|
||||
def update_stale_references(self, from_list=None):
|
||||
"""Iterate over spec lists updating references."""
|
||||
if not from_list:
|
||||
from_list = next(iter(self.spec_lists.keys()))
|
||||
index = list(self.spec_lists.keys()).index(from_list)
|
||||
|
||||
# spec_lists is an OrderedDict to ensure lists read from the manifest
|
||||
# are maintainted in order, hence, all list entries after the modified
|
||||
# list may refer to the modified list requiring stale references to be
|
||||
# updated.
|
||||
for i, (name, speclist) in enumerate(
|
||||
list(self.spec_lists.items())[index + 1 :], index + 1
|
||||
):
|
||||
new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
|
||||
speclist.update_reference(new_reference)
|
||||
|
||||
def add(self, user_spec, list_name=user_speclist_name):
|
||||
"""Add a single user_spec (non-concretized) to the Environment
|
||||
|
||||
@@ -1312,18 +1281,17 @@ def add(self, user_spec, list_name=user_speclist_name):
|
||||
elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
|
||||
virtuals = spack.repo.PATH.provider_index.providers.keys()
|
||||
if spec.name not in virtuals:
|
||||
msg = "no such package: %s" % spec.name
|
||||
raise SpackEnvironmentError(msg)
|
||||
raise SpackEnvironmentError(f"no such package: {spec.name}")
|
||||
|
||||
list_to_change = self.spec_lists[list_name]
|
||||
existing = str(spec) in list_to_change.yaml_list
|
||||
if not existing:
|
||||
list_to_change.add(str(spec))
|
||||
self.update_stale_references(list_name)
|
||||
if list_name == user_speclist_name:
|
||||
self.manifest.add_user_spec(str(user_spec))
|
||||
else:
|
||||
self.manifest.add_definition(str(user_spec), list_name=list_name)
|
||||
self._sync_speclists()
|
||||
|
||||
return bool(not existing)
|
||||
|
||||
@@ -1367,18 +1335,17 @@ def change_existing_spec(
|
||||
"There are no specs named {0} in {1}".format(match_spec.name, list_name)
|
||||
)
|
||||
elif len(matches) > 1 and not allow_changing_multiple_specs:
|
||||
raise ValueError("{0} matches multiple specs".format(str(match_spec)))
|
||||
raise ValueError(f"{str(match_spec)} matches multiple specs")
|
||||
|
||||
for idx, spec in matches:
|
||||
override_spec = Spec.override(spec, change_spec)
|
||||
self.spec_lists[list_name].replace(idx, str(override_spec))
|
||||
if list_name == user_speclist_name:
|
||||
self.manifest.override_user_spec(str(override_spec), idx=idx)
|
||||
else:
|
||||
self.manifest.override_definition(
|
||||
str(spec), override=str(override_spec), list_name=list_name
|
||||
)
|
||||
self.update_stale_references(from_list=list_name)
|
||||
self._sync_speclists()
|
||||
|
||||
def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
"""Remove specs from an environment that match a query_spec"""
|
||||
@@ -1406,22 +1373,17 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
raise SpackEnvironmentError(f"{err_msg_header}, no spec matches")
|
||||
|
||||
old_specs = set(self.user_specs)
|
||||
new_specs = set()
|
||||
|
||||
# Remove specs from the appropriate spec list
|
||||
for spec in matches:
|
||||
if spec not in list_to_change:
|
||||
continue
|
||||
try:
|
||||
list_to_change.remove(spec)
|
||||
self.update_stale_references(list_name)
|
||||
new_specs = set(self.user_specs)
|
||||
except spack.spec_list.SpecListError as e:
|
||||
# define new specs list
|
||||
new_specs = set(self.user_specs)
|
||||
except SpecListError as e:
|
||||
msg = str(e)
|
||||
if force:
|
||||
msg += " It will be removed from the concrete specs."
|
||||
# Mock new specs, so we can remove this spec from concrete spec lists
|
||||
new_specs.remove(spec)
|
||||
tty.warn(msg)
|
||||
else:
|
||||
if list_name == user_speclist_name:
|
||||
@@ -1429,7 +1391,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
else:
|
||||
self.manifest.remove_definition(str(spec), list_name=list_name)
|
||||
|
||||
# If force, update stale concretized specs
|
||||
# Recompute "definitions" and user specs
|
||||
self._sync_speclists()
|
||||
new_specs = set(self.user_specs)
|
||||
|
||||
# If 'force', update stale concretized specs
|
||||
for spec in old_specs - new_specs:
|
||||
if force and spec in self.concretized_user_specs:
|
||||
i = self.concretized_user_specs.index(spec)
|
||||
@@ -1643,23 +1609,6 @@ def _concretize_separately(self, tests=False):
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
|
||||
# Re-attach information on test dependencies
|
||||
if tests:
|
||||
# This is slow, but the information on test dependency is lost
|
||||
# after unification or when reading from a lockfile.
|
||||
for h in self.specs_by_hash:
|
||||
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
|
||||
for node in computed_spec.traverse():
|
||||
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
|
||||
for current_edge in test_edges:
|
||||
test_dependency = current_edge.spec
|
||||
if test_dependency in current_spec[node.name]:
|
||||
continue
|
||||
current_spec[node.name].add_dependency_edge(
|
||||
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
return concretized_specs
|
||||
|
||||
@property
|
||||
@@ -2367,8 +2316,12 @@ def update_environment_repository(self) -> None:

def _add_to_environment_repository(self, spec_node: Spec) -> None:
"""Add the root node of the spec to the environment repository"""
repository_dir = os.path.join(self.repos_path, spec_node.namespace)
repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
namespace: str = spec_node.namespace
repository = spack.repo.create_or_construct(
root=os.path.join(self.repos_path, namespace),
namespace=namespace,
package_api=spack.repo.PATH.get_repo(namespace).package_api,
)
pkg_dir = repository.dirname_for_package_name(spec_node.name)
fs.mkdirp(pkg_dir)
spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
@@ -2717,9 +2670,9 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
self.scope_name = f"env:{self.name}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

#: Configuration scopes associated with this environment. Note that these are not
#: Configuration scope associated with this environment. Note that this is not
#: invalidated by a re-read of the manifest file.
self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
self._env_config_scope: Optional[spack.config.ConfigScope] = None

if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
@@ -2828,6 +2781,8 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
item[list_name].append(user_spec)
break

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2854,6 +2809,8 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2879,6 +2836,8 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass

# "definitions" can be remote, so we need to update the global config too
spack.config.CONFIG.set("definitions", defs, scope=self.scope_name)
self.changed = True

def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2957,33 +2916,27 @@ def __str__(self):
return str(self.manifest_file)

@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes

scopes: List[spack.config.ConfigScope] = [
spack.config.SingleFileScope(
def env_config_scope(self) -> spack.config.ConfigScope:
"""The configuration scope for the environment manifest"""
if self._env_config_scope is None:
self._env_config_scope = spack.config.SingleFileScope(
self.scope_name,
str(self.manifest_file),
spack.schema.env.schema,
yaml_path=[TOP_LEVEL_KEY],
)
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
return scopes
ensure_no_disallowed_env_config_mods(self._env_config_scope)
return self._env_config_scope

def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
"""Add the manifest's scope to the global configuration search path."""
spack.config.CONFIG.push_scope(
self.env_config_scope, priority=ConfigScopePriority.ENVIRONMENT
)

def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
"""Remove the manifest's scope from the global config path."""
spack.config.CONFIG.remove_scope(self.env_config_scope.name)

@contextlib.contextmanager
def use_config(self):
@@ -2994,8 +2947,8 @@ def use_config(self):
self.deactivate_config_scope()


def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
"""Retrieve the suitably named environment path scopes
def environment_path_scope(name: str, path: str) -> Optional[spack.config.ConfigScope]:
"""Retrieve the suitably named environment path scope

Arguments:
name: configuration scope name
@@ -3010,11 +2963,9 @@ def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.
else:
return None

for scope in manifest.env_config_scopes:
scope.name = f"{name}:{scope.name}"
scope.writable = False

return manifest.env_config_scopes
manifest.env_config_scope.name = f"{name}:{manifest.env_config_scope.name}"
manifest.env_config_scope.writable = False
return manifest.env_config_scope


class SpackEnvironmentError(spack.error.SpackError):
lib/spack/spack/environment/list.py (new file, 286 lines)
@@ -0,0 +1,286 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
from typing import Any, Dict, List, NamedTuple, Optional, Union

import spack.spec
import spack.util.spack_yaml
import spack.variant
from spack.error import SpackError
from spack.spec import Spec


class SpecList:
def __init__(self, *, name: str = "specs", yaml_list=None, expanded_list=None):
self.name = name
self.yaml_list = yaml_list[:] if yaml_list is not None else []
# Expansions can be expensive to compute and difficult to keep updated
# We cache results and invalidate when self.yaml_list changes
self.specs_as_yaml_list = expanded_list or []
self._constraints = None
self._specs: Optional[List[Spec]] = None

@property
def is_matrix(self):
for item in self.specs_as_yaml_list:
if isinstance(item, dict):
return True
return False

@property
def specs_as_constraints(self):
if self._constraints is None:
constraints = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict):  # matrix of specs
constraints.extend(_expand_matrix_constraints(item))
else:  # individual spec
constraints.append([Spec(item)])
self._constraints = constraints

return self._constraints

@property
def specs(self) -> List[Spec]:
if self._specs is None:
specs: List[Spec] = []
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:
spec = constraint_list[0].copy()
for const in constraint_list[1:]:
spec.constrain(const)
specs.append(spec)
self._specs = specs

return self._specs

def add(self, spec: Spec):
spec_str = str(spec)
self.yaml_list.append(spec_str)

# expanded list can be updated without invalidation
if self.specs_as_yaml_list is not None:
self.specs_as_yaml_list.append(spec_str)

# Invalidate cache variables when we change the list
self._constraints = None
self._specs = None

def remove(self, spec):
# Get spec to remove from list
remove = [
s
for s in self.yaml_list
if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
]
if not remove:
msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
msg += f"Either {spec} is not in {self.name} or {spec} is "
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)

# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
self.specs_as_yaml_list.remove(item)

# invalidate cache variables when we change the list
self._constraints = None
self._specs = None

def extend(self, other: "SpecList", copy_reference=True) -> None:
self.yaml_list.extend(other.yaml_list)
self.specs_as_yaml_list.extend(other.specs_as_yaml_list)
self._constraints = None
self._specs = None

def __len__(self):
return len(self.specs)

def __getitem__(self, key):
return self.specs[key]

def __iter__(self):
return iter(self.specs)


def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
for row in matrix_config["matrix"]:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
[
[" ".join([str(c) for c in expanded_constraint_list])]
for expanded_constraint_list in _expand_matrix_constraints(r)
]
)
else:
new_row.append([r])
expanded_rows.append(new_row)

excludes = matrix_config.get("exclude", [])  # only compute once
sigil = matrix_config.get("sigil", "")

results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]

test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)

# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.spec.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass

# Resolve abstract hashes for exclusion criteria
if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
continue

if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))

# Add to list of constraints
results.append(flat_combo)

return results

|
||||
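To make the expansion concrete, here is a small worked example (made-up package and compiler names, not from the source):

matrix_config = {
    "matrix": [["hdf5", "zlib"], ["%gcc", "%clang"]],
    "exclude": ["zlib%clang"],
}
# itertools.product over the two rows yields four combinations; the one
# satisfying the exclude constraint is filtered out, leaving roughly:
#   [Spec("hdf5"), Spec("%gcc")]
#   [Spec("hdf5"), Spec("%clang")]
#   [Spec("zlib"), Spec("%gcc")]
assert len(_expand_matrix_constraints(matrix_config)) == 3
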
def _sigilify(item, sigil):
    if isinstance(item, dict):
        if sigil:
            item["sigil"] = sigil
        return item
    else:
        return sigil + item


class Definition(NamedTuple):
    name: str
    yaml_list: List[Union[str, Dict]]
    when: Optional[str]

class SpecListParser:
    """Parse definitions and user specs from data in environments"""

    def __init__(self):
        self.definitions: Dict[str, SpecList] = {}

    def parse_definitions(self, *, data: List[Dict[str, Any]]) -> Dict[str, SpecList]:
        definitions_from_yaml: Dict[str, List[Definition]] = {}
        for item in data:
            value = self._parse_yaml_definition(item)
            definitions_from_yaml.setdefault(value.name, []).append(value)

        self.definitions = {}
        self._build_definitions(definitions_from_yaml)
        return self.definitions

    def parse_user_specs(self, *, name, yaml_list) -> SpecList:
        definition = Definition(name=name, yaml_list=yaml_list, when=None)
        return self._speclist_from_definitions(name, [definition])

    def _parse_yaml_definition(self, yaml_entry) -> Definition:
        when_string = yaml_entry.get("when")

        if (when_string and len(yaml_entry) > 2) or (not when_string and len(yaml_entry) > 1):
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
            attributes = ", ".join(x for x in yaml_entry if x != "when")
            error_msg = f"definition must have a single attribute, got many: {attributes}"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        for name, yaml_list in yaml_entry.items():
            if name == "when":
                continue
            return Definition(name=name, yaml_list=yaml_list, when=when_string)

        # If we are here, it means only "when" is in the entry
        mark = spack.util.spack_yaml.get_mark_from_yaml_data(yaml_entry)
        error_msg = "definition must have a single attribute, got none"
        raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

    def _build_definitions(self, definitions_from_yaml: Dict[str, List[Definition]]):
        for name, definitions in definitions_from_yaml.items():
            self.definitions[name] = self._speclist_from_definitions(name, definitions)

    def _speclist_from_definitions(self, name, definitions) -> SpecList:
        combined_yaml_list = []
        for def_part in definitions:
            if def_part.when is not None and not spack.spec.eval_conditional(def_part.when):
                continue
            combined_yaml_list.extend(def_part.yaml_list)
        expanded_list = self._expand_yaml_list(combined_yaml_list)
        return SpecList(name=name, yaml_list=combined_yaml_list, expanded_list=expanded_list)

    def _expand_yaml_list(self, raw_yaml_list):
        result = []
        for item in raw_yaml_list:
            if isinstance(item, str) and item.startswith("$"):
                result.extend(self._expand_reference(item))
                continue

            value = item
            if isinstance(item, dict):
                value = self._expand_yaml_matrix(item)
            result.append(value)
        return result

    def _expand_reference(self, item: str):
        sigil, name = "", item[1:]
        if name.startswith("^") or name.startswith("%"):
            sigil, name = name[0], name[1:]

        if name not in self.definitions:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(item)
            error_msg = f"trying to expand the name '{name}', which is not defined yet"
            raise UndefinedReferenceError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        value = self.definitions[name].specs_as_yaml_list
        if not sigil:
            return value
        return [_sigilify(x, sigil) for x in value]

    def _expand_yaml_matrix(self, matrix_yaml):
        extra_attributes = set(matrix_yaml) - {"matrix", "exclude"}
        if extra_attributes:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
            error_msg = f"extra attributes in spec matrix: {','.join(sorted(extra_attributes))}"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        if "matrix" not in matrix_yaml:
            mark = spack.util.spack_yaml.get_mark_from_yaml_data(matrix_yaml)
            error_msg = "matrix is missing the 'matrix' attribute"
            raise SpecListError(f"{mark.name}:{mark.line + 1}: {error_msg}")

        # Assume data has been validated against the YAML schema
        result = {"matrix": [self._expand_yaml_list(row) for row in matrix_yaml["matrix"]]}
        if "exclude" in matrix_yaml:
            result["exclude"] = matrix_yaml["exclude"]
        return result

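A usage sketch (hypothetical definition names and spec strings; data is YAML already parsed into Python structures) showing how definitions, references, and sigils tie together:

parser = SpecListParser()
definitions = parser.parse_definitions(
    data=[
        {"compilers": ["gcc@12", "clang@15"]},
        {"packages": ["zlib", "hdf5"]},
        # "$packages" expands the named list in place; "$%compilers" prefixes
        # each referenced entry with the "%" sigil via _sigilify
        {"all": [{"matrix": [["$packages"], ["$%compilers"]]}]},
    ]
)
user_specs = parser.parse_user_specs(name="specs", yaml_list=["$all"])
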
class SpecListError(SpackError):
    """Error class for all errors related to SpecList objects."""


class UndefinedReferenceError(SpecListError):
    """Error class for undefined references in Spack stacks."""


class InvalidSpecConstraintError(SpecListError):
    """Error class for invalid spec constraints at concretize time."""

@@ -49,10 +49,23 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
         cmds += 'set "SPACK_ENV=%s"\n' % env.path
         if view:
             cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
+        if prompt:
+            old_prompt = os.environ.get("SPACK_OLD_PROMPT")
+            if not old_prompt:
+                old_prompt = os.environ.get("PROMPT")
+            cmds += f'set "SPACK_OLD_PROMPT={old_prompt}"\n'
+            cmds += f'set "PROMPT={prompt} $P$G"\n'
     elif shell == "pwsh":
         cmds += "$Env:SPACK_ENV='%s'\n" % env.path
         if view:
             cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
+        if prompt:
+            cmds += (
+                "function global:prompt { $pth = $(Convert-Path $(Get-Location))"
+                ' | Split-Path -leaf; if(!"$Env:SPACK_OLD_PROMPT") '
+                '{$Env:SPACK_OLD_PROMPT="[spack] PS $pth>"}; '
+                '"%s PS $pth>"}\n' % prompt
+            )
     else:
         bash_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=True)
         zsh_color_prompt = colorize(f"@G{{{prompt}}}", color=True, enclose=False, zsh=True)
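For illustration, activating a hypothetical environment at C:\spack\envs\dev with prompt "[dev]" in a batch shell would emit roughly the following commands (assuming the previous prompt was the cmd.exe default $P$G):

set "SPACK_ENV=C:\spack\envs\dev"
set "SPACK_OLD_PROMPT=$P$G"
set "PROMPT=[dev] $P$G"
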
@@ -107,10 +120,19 @@ def deactivate_header(shell):
         cmds += 'set "SPACK_ENV="\n'
         cmds += 'set "SPACK_ENV_VIEW="\n'
         # TODO: despacktivate
-        # TODO: prompt
+        old_prompt = os.environ.get("SPACK_OLD_PROMPT")
+        if old_prompt:
+            cmds += f'set "PROMPT={old_prompt}"\n'
+            cmds += 'set "SPACK_OLD_PROMPT="\n'
     elif shell == "pwsh":
         cmds += "Set-Item -Path Env:SPACK_ENV\n"
         cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
+        cmds += (
+            "function global:prompt { $pth = $(Convert-Path $(Get-Location))"
+            ' | Split-Path -leaf; $spack_prompt = "[spack] $pth >"; '
+            'if("$Env:SPACK_OLD_PROMPT") {$spack_prompt=$Env:SPACK_OLD_PROMPT};'
+            " $spack_prompt}\n"
+        )
     else:
         cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
         cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
@@ -202,3 +202,16 @@ class MirrorError(SpackError):
 
     def __init__(self, msg, long_msg=None):
         super().__init__(msg, long_msg)
+
+
+class NoChecksumException(SpackError):
+    """
+    Raised if file fails checksum verification.
+    """
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
@@ -27,11 +27,14 @@
 import os
 import re
 import shutil
 import sys
+import time
 import urllib.error
 import urllib.parse
 import urllib.request
 import urllib.response
 from pathlib import PurePath
-from typing import List, Optional
+from typing import Callable, List, Mapping, Optional
 
 import llnl.url
 import llnl.util
@@ -219,6 +222,114 @@ def mirror_id(self):
         """BundlePackages don't have a mirror id."""
 
 
+def _format_speed(total_bytes: int, elapsed: float) -> str:
+    """Return a human-readable average download speed string."""
+    elapsed = 1 if elapsed <= 0 else elapsed  # avoid divide by zero
+    speed = total_bytes / elapsed
+    if speed >= 1e9:
+        return f"{speed / 1e9:6.1f} GB/s"
+    elif speed >= 1e6:
+        return f"{speed / 1e6:6.1f} MB/s"
+    elif speed >= 1e3:
+        return f"{speed / 1e3:6.1f} KB/s"
+    return f"{speed:6.1f} B/s"
+
+
+def _format_bytes(total_bytes: int) -> str:
+    """Return a human-readable total bytes string."""
+    if total_bytes >= 1e9:
+        return f"{total_bytes / 1e9:7.2f} GB"
+    elif total_bytes >= 1e6:
+        return f"{total_bytes / 1e6:7.2f} MB"
+    elif total_bytes >= 1e3:
+        return f"{total_bytes / 1e3:7.2f} KB"
+    return f"{total_bytes:7.2f} B"
+
+
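A few spot checks of the helpers above, computed from the formulas (decimal units, so 1 KB/s is 1e3 bytes per second; the fixed-width fields keep the progress line stable):

assert _format_bytes(512) == " 512.00 B"
assert _format_bytes(2_500_000) == "   2.50 MB"
assert _format_speed(1_000_000, 2.0) == " 500.0 KB/s"
assert _format_speed(123, 0.0) == " 123.0 B/s"  # elapsed is clamped to 1s
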
+class FetchProgress:
+    #: Characters to rotate in the spinner.
+    spinner = ["|", "/", "-", "\\"]
+
+    def __init__(
+        self,
+        total_bytes: Optional[int] = None,
+        enabled: bool = True,
+        get_time: Callable[[], float] = time.time,
+    ) -> None:
+        """Initialize a FetchProgress instance.
+        Args:
+            total_bytes: Total number of bytes to download, if known.
+            enabled: Whether to print progress information.
+            get_time: Function to get the current time."""
+        #: Number of bytes downloaded so far.
+        self.current_bytes = 0
+        #: Delta time between progress prints
+        self.delta = 0.1
+        #: Whether to print progress information.
+        self.enabled = enabled
+        #: Function to get the current time.
+        self.get_time = get_time
+        #: Time of last progress print to limit output
+        self.last_printed = 0.0
+        #: Time of start of download
+        self.start_time = get_time() if enabled else 0.0
+        #: Total number of bytes to download, if known.
+        self.total_bytes = total_bytes if total_bytes and total_bytes > 0 else 0
+        #: Index of spinner character to print (used if total bytes is unknown)
+        self.index = 0
+
+    @classmethod
+    def from_headers(
+        cls,
+        headers: Mapping[str, str],
+        enabled: bool = True,
+        get_time: Callable[[], float] = time.time,
+    ) -> "FetchProgress":
+        """Create a FetchProgress instance from HTTP headers."""
+        # headers.get is case-insensitive if it's from a HTTPResponse object.
+        content_length = headers.get("Content-Length")
+        try:
+            total_bytes = int(content_length) if content_length else None
+        except ValueError:
+            total_bytes = None
+        return cls(total_bytes=total_bytes, enabled=enabled, get_time=get_time)
+
+    def advance(self, num_bytes: int, out=sys.stdout) -> None:
+        if not self.enabled:
+            return
+        self.current_bytes += num_bytes
+        self.print(out=out)
+
+    def print(self, final: bool = False, out=sys.stdout) -> None:
+        if not self.enabled:
+            return
+        current_time = self.get_time()
+        if self.last_printed + self.delta < current_time or final:
+            self.last_printed = current_time
+            # print a newline if this is the final update
+            maybe_newline = "\n" if final else ""
+            # if we know the total bytes, show a percentage, otherwise a spinner
+            if self.total_bytes > 0:
+                percentage = min(100 * self.current_bytes / self.total_bytes, 100.0)
+                percent_or_spinner = f"[{percentage:3.0f}%] "
+            else:
+                # only show the spinner if we are not at 100%
+                if final:
+                    percent_or_spinner = "[100%] "
+                else:
+                    percent_or_spinner = f"[ {self.spinner[self.index]} ] "
+                    self.index = (self.index + 1) % len(self.spinner)
+
+            print(
+                f"\r {percent_or_spinner}{_format_bytes(self.current_bytes)} "
+                f"@ {_format_speed(self.current_bytes, current_time - self.start_time)}"
+                f"{maybe_newline}",
+                end="",
+                flush=True,
+                file=out,
+            )
+
+
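A minimal driver (made-up byte counts and an in-memory stream) showing the call pattern that _fetch_urllib uses below:

import io

progress = FetchProgress.from_headers({"Content-Length": "262144"}, enabled=True)
out = io.StringIO()
for _ in range(4):                    # four hypothetical 64 KiB chunks
    progress.advance(65536, out=out)
progress.print(final=True, out=out)   # ends the line with "[100%]" and a newline
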
 @fetcher
 class URLFetchStrategy(FetchStrategy):
     """URLFetchStrategy pulls source code from a URL for an archive, check the
@@ -295,8 +406,9 @@ def fetch(self):
         )
 
     def _fetch_from_url(self, url):
-        if spack.config.get("config:url_fetch_method") == "curl":
-            return self._fetch_curl(url)
+        fetch_method = spack.config.get("config:url_fetch_method", "urllib")
+        if fetch_method.startswith("curl"):
+            return self._fetch_curl(url, config_args=fetch_method.split()[1:])
         else:
             return self._fetch_urllib(url)
 
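With this change, the config:url_fetch_method value may carry extra command-line arguments after the curl program name; a sketch of how such a value (hypothetical) is split:

fetch_method = "curl -k --retry 3"            # hypothetical config value
assert fetch_method.startswith("curl")
config_args = fetch_method.split()[1:]        # ["-k", "--retry", "3"]
# _fetch_curl later prepends these to the invocation:
#   curl_args = config_args + save_args + base_args + cookie_args
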
@@ -315,7 +427,7 @@ def _check_headers(self, headers):
             tty.warn(msg)
 
     @_needs_stage
-    def _fetch_urllib(self, url):
+    def _fetch_urllib(self, url, chunk_size=65536):
         save_file = self.stage.save_filename
 
         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
@@ -326,8 +438,15 @@ def _fetch_urllib(self, url):
         try:
             response = web_util.urlopen(request)
             tty.msg(f"Fetching {url}")
+            progress = FetchProgress.from_headers(response.headers, enabled=sys.stdout.isatty())
             with open(save_file, "wb") as f:
-                shutil.copyfileobj(response, f)
+                while True:
+                    chunk = response.read(chunk_size)
+                    if not chunk:
+                        break
+                    f.write(chunk)
+                    progress.advance(len(chunk))
+            progress.print(final=True)
         except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
@@ -345,7 +464,7 @@ def _fetch_urllib(self, url):
         self._check_headers(str(response.headers))
 
     @_needs_stage
-    def _fetch_curl(self, url):
+    def _fetch_curl(self, url, config_args=[]):
         save_file = None
         partial_file = None
         if self.stage.save_filename:
@@ -374,7 +493,7 @@ def _fetch_curl(self, url):
         timeout = self.extra_options.get("timeout")
 
         base_args = web_util.base_curl_fetch_args(url, timeout)
-        curl_args = save_args + base_args + cookie_args
+        curl_args = config_args + save_args + base_args + cookie_args
 
         # Run curl but grab the mime type from the http headers
         curl = self.curl
@@ -12,7 +12,7 @@
 import shutil
 import sys
 from collections import Counter, OrderedDict
-from typing import Callable, List, Optional, Tuple, Type, TypeVar, Union
+from typing import Callable, Iterable, List, Optional, Tuple, Type, TypeVar, Union
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -391,7 +391,7 @@ def phase_tests(self, builder, phase_name: str, method_names: List[str]):
         if self.test_failures:
             raise TestFailure(self.test_failures)
 
-    def stand_alone_tests(self, kwargs):
+    def stand_alone_tests(self, kwargs, timeout: Optional[int] = None) -> None:
         """Run the package's stand-alone tests.
 
         Args:
@@ -399,7 +399,9 @@ def stand_alone_tests(self, kwargs):
         """
         import spack.build_environment  # avoid circular dependency
 
-        spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
+        spack.build_environment.start_build_process(
+            self.pkg, test_process, kwargs, timeout=timeout
+        )
 
     def parts(self) -> int:
         """The total number of (checked) test parts."""
@@ -847,7 +849,7 @@ def write_test_summary(counts: "Counter"):
 class TestSuite:
     """The class that manages specs for ``spack test run`` execution."""
 
-    def __init__(self, specs, alias=None):
+    def __init__(self, specs: Iterable[Spec], alias: Optional[str] = None) -> None:
         # copy so that different test suites have different package objects
         # even if they contain the same spec
         self.specs = [spec.copy() for spec in specs]
@@ -855,42 +857,43 @@ def __init__(self, specs, alias=None):
         self.current_base_spec = None  # spec currently running do_test
 
         self.alias = alias
-        self._hash = None
-        self._stage = None
+        self._hash: Optional[str] = None
+        self._stage: Optional[Prefix] = None
 
         self.counts: "Counter" = Counter()
 
     @property
-    def name(self):
+    def name(self) -> str:
         """The name (alias or, if none, hash) of the test suite."""
         return self.alias if self.alias else self.content_hash
 
     @property
-    def content_hash(self):
+    def content_hash(self) -> str:
         """The hash used to uniquely identify the test suite."""
         if not self._hash:
             json_text = sjson.dump(self.to_dict())
             assert json_text is not None, f"{__name__} unexpected value for 'json_text'"
             sha = hashlib.sha1(json_text.encode("utf-8"))
             b32_hash = base64.b32encode(sha.digest()).lower()
             b32_hash = b32_hash.decode("utf-8")
             self._hash = b32_hash
         return self._hash
 
-    def __call__(self, *args, **kwargs):
+    def __call__(
+        self,
+        *,
+        remove_directory: bool = True,
+        dirty: bool = False,
+        fail_first: bool = False,
+        externals: bool = False,
+        timeout: Optional[int] = None,
+    ):
         self.write_reproducibility_data()
 
-        remove_directory = kwargs.get("remove_directory", True)
-        dirty = kwargs.get("dirty", False)
-        fail_first = kwargs.get("fail_first", False)
-        externals = kwargs.get("externals", False)
-
         for spec in self.specs:
             try:
                 if spec.package.test_suite:
                     raise TestSuiteSpecError(
-                        "Package {} cannot be run in two test suites at once".format(
-                            spec.package.name
-                        )
+                        f"Package {spec.package.name} cannot be run in two test suites at once"
                     )
 
                 # Set up the test suite to know which test is running
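With the keyword-only signature above, callers spell the options out instead of packing them into **kwargs; a sketch with a hypothetical concrete spec:

suite = TestSuite([spec], alias="smoke")  # spec: a concrete Spec to test
# all arguments are keyword-only; passing them positionally raises TypeError
suite(remove_directory=False, fail_first=True, timeout=300)
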
@@ -905,7 +908,7 @@ def __call__(self, *args, **kwargs):
                 fs.mkdirp(test_dir)
 
                 # run the package tests
-                spec.package.do_test(dirty=dirty, externals=externals)
+                spec.package.do_test(dirty=dirty, externals=externals, timeout=timeout)
 
                 # Clean up on success
                 if remove_directory:
@@ -956,15 +959,12 @@ def __call__(self, *args, **kwargs):
         if failures:
             raise TestSuiteFailure(failures)
 
-    def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestStatus]:
-        """Determine the overall test results status for the spec.
+    def test_status(self, spec: spack.spec.Spec, externals: bool) -> TestStatus:
+        """Returns the overall test results status for the spec.
 
         Args:
             spec: instance of the spec under test
             externals: ``True`` if externals are to be tested, else ``False``
-
-        Returns:
-            the spec's test status if available or ``None``
         """
         tests_status_file = self.tested_file_for_spec(spec)
         if not os.path.exists(tests_status_file):
@@ -981,109 +981,84 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
             value = (f.read()).strip("\n")
         return TestStatus(int(value)) if value else TestStatus.NO_TESTS
 
-    def ensure_stage(self):
+    def ensure_stage(self) -> None:
         """Ensure the test suite stage directory exists."""
         if not os.path.exists(self.stage):
             fs.mkdirp(self.stage)
 
     @property
-    def stage(self):
-        """The root test suite stage directory.
-
-        Returns:
-            str: the spec's test stage directory path
-        """
+    def stage(self) -> Prefix:
+        """The root test suite stage directory"""
         if not self._stage:
             self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
         return self._stage
 
     @stage.setter
-    def stage(self, value):
+    def stage(self, value: Union[Prefix, str]) -> None:
         """Set the value of a non-default stage directory."""
         self._stage = value if isinstance(value, Prefix) else Prefix(value)
 
     @property
-    def results_file(self):
+    def results_file(self) -> Prefix:
         """The path to the results summary file."""
         return self.stage.join(results_filename)
 
     @classmethod
-    def test_pkg_id(cls, spec):
+    def test_pkg_id(cls, spec: Spec) -> str:
         """The standard install test package identifier.
 
         Args:
            spec: instance of the spec under test
 
         Returns:
            str: the install test package identifier
        """
         return spec.format_path("{name}-{version}-{hash:7}")
 
     @classmethod
-    def test_log_name(cls, spec):
+    def test_log_name(cls, spec: Spec) -> str:
         """The standard log filename for a spec.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's log filename
+            spec: instance of the spec under test
         """
-        return "%s-test-out.txt" % cls.test_pkg_id(spec)
+        return f"{cls.test_pkg_id(spec)}-test-out.txt"
 
-    def log_file_for_spec(self, spec):
+    def log_file_for_spec(self, spec: Spec) -> Prefix:
         """The test log file path for the provided spec.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the path to the spec's log file
+            spec: instance of the spec under test
         """
         return self.stage.join(self.test_log_name(spec))
 
-    def test_dir_for_spec(self, spec):
+    def test_dir_for_spec(self, spec: Spec) -> Prefix:
         """The path to the test stage directory for the provided spec.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test stage directory path
+            spec: instance of the spec under test
         """
         return Prefix(self.stage.join(self.test_pkg_id(spec)))
 
     @classmethod
-    def tested_file_name(cls, spec):
+    def tested_file_name(cls, spec: Spec) -> str:
         """The standard test status filename for the spec.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test status filename
+            spec: instance of the spec under test
         """
         return "%s-tested.txt" % cls.test_pkg_id(spec)
 
-    def tested_file_for_spec(self, spec):
+    def tested_file_for_spec(self, spec: Spec) -> str:
         """The test status file path for the spec.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-
-        Returns:
-            str: the spec's test status file path
+            spec: instance of the spec under test
         """
         return fs.join_path(self.stage, self.tested_file_name(spec))
 
     @property
-    def current_test_cache_dir(self):
+    def current_test_cache_dir(self) -> str:
         """Path to the test stage directory where the current spec's cached
         build-time files were automatically copied.
 
-        Returns:
-            str: path to the current spec's staged, cached build-time files.
-
         Raises:
             TestSuiteSpecError: If there is no spec being tested
         """
@@ -1095,13 +1070,10 @@ def current_test_cache_dir(self):
         return self.test_dir_for_spec(base_spec).cache.join(test_spec.name)
 
     @property
-    def current_test_data_dir(self):
+    def current_test_data_dir(self) -> str:
         """Path to the test stage directory where the current spec's custom
         package (data) files were automatically copied.
 
-        Returns:
-            str: path to the current spec's staged, custom package (data) files
-
         Raises:
             TestSuiteSpecError: If there is no spec being tested
         """
@@ -1112,17 +1084,17 @@ def current_test_data_dir(self):
         base_spec = self.current_base_spec
         return self.test_dir_for_spec(base_spec).data.join(test_spec.name)
 
-    def write_test_result(self, spec, result):
+    def write_test_result(self, spec: Spec, result: TestStatus) -> None:
         """Write the spec's test result to the test suite results file.
 
         Args:
-            spec (spack.spec.Spec): instance of the spec under test
-            result (str): result from the spec's test execution (e.g, PASSED)
+            spec: instance of the spec under test
+            result: result from the spec's test execution (e.g, PASSED)
         """
         msg = f"{self.test_pkg_id(spec)} {result}"
         _add_msg_to_file(self.results_file, msg)
 
-    def write_reproducibility_data(self):
+    def write_reproducibility_data(self) -> None:
         for spec in self.specs:
             repo_cache_path = self.stage.repo.join(spec.name)
             spack.repo.PATH.dump_provenance(spec, repo_cache_path)
@@ -1167,12 +1139,12 @@ def from_dict(d):
         return TestSuite(specs, alias)
 
     @staticmethod
-    def from_file(filename):
+    def from_file(filename: str) -> "TestSuite":
         """Instantiate a TestSuite using the specs and optional alias
         provided in the given file.
 
         Args:
-            filename (str): The path to the JSON file containing the test
+            filename: The path to the JSON file containing the test
                 suite specs and optional alias.
 
         Raises:
Some files were not shown because too many files have changed in this diff.