Compare commits

`woptim/spa` ... `develop-20` (763 commits)
*763 commits, listed by SHA1 only; the author and date columns of the commit table were empty.*
`.github/workflows/build-containers.yml` (vendored, 18 changed lines)

```diff
@@ -40,17 +40,17 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
-                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
-                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
-                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
+                     [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
+                     [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
+                     [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
+                     [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
+                     [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
+                     [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
+                     [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
                      [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                      [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
-                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
+                     [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
+                     [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
         name: Build ${{ matrix.dockerfile[0] }}
         if: github.repository == 'spack/spack'
         steps:
```
`.github/workflows/ci.yaml` (vendored, 4 changed lines)

```diff
@@ -81,6 +81,10 @@ jobs:
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
 
+  import-check:
+    needs: [ changes ]
+    uses: ./.github/workflows/import-check.yaml
+
   all-prechecks:
     needs: [ prechecks ]
     if: ${{ always() }}
```
`.github/workflows/coverage.yml` (vendored, 1 changed line)

```diff
@@ -33,3 +33,4 @@ jobs:
     with:
       verbose: true
       fail_ci_if_error: false
+      token: ${{ secrets.CODECOV_TOKEN }}
```
`.github/workflows/import-check.yaml` (vendored, new file, 49 lines)

```diff
@@ -0,0 +1,49 @@
+name: import-check
+
+on:
+  workflow_call:
+
+jobs:
+  # Check we don't make the situation with circular imports worse
+  import-check:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: julia-actions/setup-julia@v2
+      with:
+        version: '1.10'
+    - uses: julia-actions/cache@v2
+
+    # PR: use the base of the PR as the old commit
+    - name: Checkout PR base commit
+      if: github.event_name == 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        ref: ${{ github.event.pull_request.base.sha }}
+        path: old
+    # not a PR: use the previous commit as the old commit
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 2
+        path: old
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      run: git -C old reset --hard HEAD^
+
+    - name: Checkout new commit
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        path: new
+    - name: Install circular import checker
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        repository: haampie/circular-import-fighter
+        ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
+        path: circular-import-fighter
+    - name: Install dependencies
+      working-directory: circular-import-fighter
+      run: make -j dependencies
+    - name: Circular import check
+      working-directory: circular-import-fighter
+      run: make -j compare "SPACK_ROOT=../old ../new"
```
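The comparison this workflow performs can be reproduced locally. A sketch, assuming two Spack checkouts in `old/` and `new/` next to the checker (the `git clone` stands in for the `actions/checkout` steps above):

```console
$ git clone https://github.com/haampie/circular-import-fighter
$ cd circular-import-fighter
$ make -j dependencies                        # build the checker's own dependencies
$ make -j compare "SPACK_ROOT=../old ../new"  # non-zero exit if ../new adds import cycles
```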
```diff
@@ -1,7 +1,7 @@
-black==24.10.0
+black==25.1.0
 clingo==5.7.1
-flake8==7.1.1
-isort==5.13.2
-mypy==1.11.2
-types-six==1.17.0.20241205
+flake8==7.1.2
+isort==6.0.1
+mypy==1.15.0
+types-six==1.17.0.20250304
 vermin==1.6.0
```
`.github/workflows/valid-style.yml` (vendored, 60 changed lines)

```diff
@@ -86,66 +86,6 @@ jobs:
         spack -d bootstrap now --dev
         spack -d style -t black
         spack unit-test -V
-  # Check we don't make the situation with circular imports worse
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Problematic imports before
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../old SUFFIX=.old
-    - name: Problematic imports after
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../new SUFFIX=.new
-    - name: Compare import cycles
-      working-directory: circular-import-fighter
-      run: |
-        edges_before="$(head -n1 solution.old)"
-        edges_after="$(head -n1 solution.new)"
-        if [ "$edges_after" -gt "$edges_before" ]; then
-          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-          printf 'previously this was %s\033[0m\n' "$edges_before"
-          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
-          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
-          exit 1
-        else
-          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-        fi
-
   # Further style checks from pylint
   pylint:
```
`.gitignore` (vendored, 1 changed line)

```diff
@@ -201,7 +201,6 @@ tramp
 
 # Org-mode
 .org-id-locations
-*_archive
 
 # flymake-mode
 *_flymake.*
```
```diff
@@ -25,7 +25,6 @@ exit 1
 # The code above runs this file with our preferred python interpreter.
 
 import os
-import os.path
 import sys
 
 min_python3 = (3, 6)
```
```diff
@@ -43,6 +43,28 @@ concretizer:
     # (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
+    # Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
+    # number used if there isn't a more specific alternative
+    max_dupes:
+      default: 1
+      # Virtuals
+      c: 2
+      cxx: 2
+      fortran: 1
+      # Regular packages
+      cmake: 2
+      gmake: 2
+      python: 2
+      python-venv: 2
+      py-cython: 2
+      py-flit-core: 2
+      py-pip: 2
+      py-setuptools: 2
+      py-wheel: 2
+      xcb-proto: 2
+      # Compilers
+      gcc: 2
+      llvm: 2
   # Option to specify compatibility between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -63,3 +85,7 @@ concretizer:
   # Setting this to false yields unreproducible results, so we advise to use that value only
   # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
   error_on_timeout: true
+
+  # Static analysis may reduce the concretization time by generating smaller ASP problems, in
+  # cases where there are requirements that prevent part of the search space to be explored.
+  static_analysis: false
```
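Since ``max_dupes`` and ``static_analysis`` are ordinary concretizer settings, a user scope can override the new defaults. A minimal sketch, assuming the keys sit exactly where the hunk above places them relative to ``strategy`` (the ``4`` is an arbitrary illustrative value):

```yaml
# ~/.spack/concretizer.yaml -- hypothetical user override
concretizer:
  max_dupes:
    default: 1
    py-setuptools: 4     # allow more duplicate nodes for one package
  static_analysis: true  # opt in to generating smaller ASP problems
```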
```diff
@@ -36,7 +36,7 @@ packages:
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
-    java: [openjdk, jdk, ibm-java]
+    java: [openjdk, jdk]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
@@ -73,15 +73,27 @@ packages:
     permissions:
       read: world
       write: user
+  cray-fftw:
+    buildable: false
+  cray-libsci:
+    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
+  cray-pmi:
+    buildable: false
   egl:
     buildable: false
+  essl:
+    buildable: false
   fujitsu-mpi:
     buildable: false
+  fujitsu-ssl2:
+    buildable: false
   hpcx-mpi:
     buildable: false
+  mpt:
+    buildable: false
   spectrum-mpi:
     buildable: false
```
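A ``buildable: false`` entry tells the concretizer it may never build these packages, so a site would normally pair it with an external registration. A hedged sketch for one of the new entries (the version and prefix here are made up):

```yaml
packages:
  cray-fftw:
    buildable: false
    externals:
    - spec: cray-fftw@3.3.10      # hypothetical version
      prefix: /opt/cray/pe/fftw   # hypothetical install prefix
```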
```diff
@@ -1,5 +1,5 @@
 config:
   locks: false
   build_stage::
-  - '$spack/.staging'
+  - '$user_cache_path/stage'
   stage_name: '{name}-{version}-{hash:7}'
```
```diff
@@ -1761,19 +1761,24 @@ Verifying installations
 The ``spack verify`` command can be used to verify the validity of
 Spack-installed packages any time after installation.
 
+^^^^^^^^^^^^^^^^^^^^^^^^^
+``spack verify manifest``
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
 At installation time, Spack creates a manifest of every file in the
 installation prefix. For links, Spack tracks the mode, ownership, and
 destination. For directories, Spack tracks the mode, and
 ownership. For files, Spack tracks the mode, ownership, modification
-time, hash, and size. The Spack verify command will check, for every
-file in each package, whether any of those attributes have changed. It
-will also check for newly added files or deleted files from the
-installation prefix. Spack can either check all installed packages
+time, hash, and size. The ``spack verify manifest`` command will check,
+for every file in each package, whether any of those attributes have
+changed. It will also check for newly added files or deleted files from
+the installation prefix. Spack can either check all installed packages
 using the `-a,--all` or accept specs listed on the command line to
 verify.
 
-The ``spack verify`` command can also verify for individual files that
-they haven't been altered since installation time. If the given file
+The ``spack verify manifest`` command can also verify for individual files
+that they haven't been altered since installation time. If the given file
 is not in a Spack installation prefix, Spack will report that it is
 not owned by any package. To check individual files instead of specs,
 use the ``-f,--files`` option.
@@ -1788,6 +1793,22 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.
 
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+``spack verify libraries``
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``spack verify libraries`` command can be used to verify that packages
+do not have accidental system dependencies. This command scans the install
+prefixes of packages for executables and shared libraries, and resolves
+their needed libraries in their RPATHs. When needed libraries cannot be
+located, an error is reported. This typically indicates that a package
+was linked against a system library, instead of a library provided by
+a Spack package.
+
+This verification can also be enabled as a post-install hook by setting
+``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
+in :ref:`config.yaml <config-yaml>`.
+
 -----------------------
 Filesystem requirements
 -----------------------
```
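Following the ``config:shared_linking:missing_library_policy`` path named in the hunk above, the post-install hook form of this check would be enabled roughly like so:

```yaml
config:
  shared_linking:
    missing_library_policy: error   # or "warn" to report without failing the install
```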
```diff
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
 % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
 % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
+% spack buildcache update-index /opt/bootstrap/bootstrap_cache
 
 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
```
```diff
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.
 
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Addings flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Adding flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^
 
 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:
 
    def configure_args(self):
        args = []
-
+       ...
        if self.spec.satisfies("+mpi"):
            args.append("--enable-mpi")
        else:
@@ -299,7 +299,10 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python
 
    def configure_args(self):
-       return [self.enable_or_disable("mpi")]
+       args = []
+       ...
+       args.extend(self.enable_or_disable("mpi"))
+       return args
 
 Note that we are explicitly disabling MPI support if it is not
@@ -344,7 +347,14 @@ typically used to enable or disable some feature within the package.
        default=False,
        description="Memchecker support for debugging [degrades performance]"
    )
-   config_args.extend(self.enable_or_disable("memchecker"))
+   ...
+
+   def configure_args(self):
+       args = []
+       ...
+       args.extend(self.enable_or_disable("memchecker"))
+
+       return args
 
 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
```
```diff
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:
 
 .. code-block:: python
 
-   phases = ["configure", "build", "install"]
+   phases = ("configure", "build", "install")
 
 Similarly, ``cmake`` defines:
 
 .. code-block:: python
 
-   phases = ["bootstrap", "build", "install"]
+   phases = ("bootstrap", "build", "install")
 
 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
```
```diff
@@ -223,6 +223,10 @@ def setup(sphinx):
     ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
+    ("py:class", "llnl.util.lang.KT"),
+    ("py:class", "llnl.util.lang.VT"),
+    ("py:obj", "llnl.util.lang.KT"),
+    ("py:obj", "llnl.util.lang.VT"),
 ]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
```
```diff
@@ -125,6 +125,8 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.
 
+.. _Misc Cache:
+
 --------------------
 ``misc_cache``
 --------------------
@@ -334,3 +336,52 @@ create a new alias called ``inst`` that will always call ``install -v``:
 
   aliases:
     inst: install -v
+
+-------------------------------
+``concretization_cache:enable``
+-------------------------------
+
+When set to ``true``, Spack will utilize a cache of solver outputs from
+successful concretization runs. When enabled, Spack will check the concretization
+cache prior to running the solver. If a previous request to solve a given
+problem is present in the cache, Spack will load the concrete specs and other
+solver data from the cache rather than running the solver. Specs not previously
+concretized will be added to the cache on a successful solve. The cache additionally
+holds solver statistics, so commands like ``spack solve`` will still return information
+about the run that produced a given solver result.
+
+This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
+Cache is cleaned.
+
+When ``false`` or omitted, all concretization requests will be performed from scratch.
+
+----------------------------
+``concretization_cache:url``
+----------------------------
+
+Path to the location where Spack will root the concretization cache. Currently this only supports
+paths on the local filesystem.
+
+Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
+
+------------------------------------
+``concretization_cache:entry_limit``
+------------------------------------
+
+Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
+
+-----------------------------------
+``concretization_cache:size_limit``
+-----------------------------------
+
+Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
```
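Read together, the four ``concretization_cache`` settings above suggest a stanza like the following sketch (the nesting under ``config`` and the limit values are assumptions for illustration):

```yaml
config:
  concretization_cache:
    enable: true
    url: $misc_cache/concretization  # the documented default location
    entry_limit: 512                 # 0 disables count-based pruning
    size_limit: 1073741824           # bytes; 0 disables size-based pruning
```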
```diff
@@ -14,6 +14,7 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
+* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`
```
```diff
@@ -361,7 +361,6 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
-* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``
```
```diff
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:
 
 .. code-block:: console
 
-   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
    ...
 
-   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
    Out[1]: ...
 
 or a file:
```
```diff
@@ -112,6 +112,19 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).
 
+Environments created from a manifest will copy any included configs
+from relative paths inside the environment. Relative paths from
+outside the environment will cause errors, and absolute paths will be
+kept absolute. For example, if ``spack.yaml`` includes:
+
+.. code-block:: yaml
+
+   spack:
+     include: [./config.yaml]
+
+then the created environment will have its own copy of the file
+``config.yaml`` copied from the location in the original environment.
+
 Create an environment from a ``spack.lock`` file using:
 
 .. code-block:: console
@@ -160,7 +173,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.
 
 .. code-block:: console
 
    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
```
```diff
@@ -424,8 +437,8 @@ Developing Packages in a Spack Environment
 
 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
 will configure Spack to install the package from local source.
 If a version is not provided from the command line interface then spack
 will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -435,9 +448,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
 A custom check can be created by overriding the ``detect_dev_src_change`` method
 in your package class. This is particularly useful for projects using custom spack repo's
 to drive development and want to optimize performance.
 
 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
```
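A hypothetical sketch of the ``detect_dev_src_change`` override described in the hunk above; the method name comes from the docs, while the signature and the git-based body are assumptions:

```python
from spack.package import *


class Foo(CMakePackage):
    """Toy package with a custom dev-source change check."""

    def detect_dev_src_change(self) -> bool:
        # Assumed contract: return True when the local source tree has
        # changed since the last install. Ask git for uncommitted changes
        # instead of relying on Spack's default mtime comparison.
        git = which("git", required=True)
        status = git("-C", self.stage.source_path, "status", "--porcelain", output=str)
        return bool(status.strip())
```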
```diff
@@ -453,7 +466,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.
 
 If the package being developed supports out-of-source builds then users can use the
 ``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
 The supplied location will become the build-directory for that package in all future builds.
```
```diff
@@ -657,24 +670,45 @@ This configuration sets the default compiler for all packages to
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack environments allow an ``include`` heading in their yaml
-schema. This heading pulls in external configuration files and applies
-them to the environment.
+Spack environments allow an ``include`` heading in their yaml schema.
+This heading pulls in external configuration files and applies them to
+the environment.
 
 .. code-block:: yaml
 
    spack:
      include:
-     - relative/path/to/config.yaml
+     - environment/relative/path/to/config.yaml
      - https://github.com/path/to/raw/config/compilers.yaml
      - /absolute/path/to/packages.yaml
+     - path: /path/to/$os/$target/environment
+       optional: true
+     - path: /path/to/os-specific/config-dir
+       when: os == "ventura"
 
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. (See
+:ref:`include-yaml` for more information on optional and conditional entries.)
+
+Files are listed using paths to individual files or directories containing them.
+Path entries may be absolute or relative to the environment or specified as
+URLs. URLs to individual files need to link to the **raw** form of the file's
+contents (e.g., `GitHub
+<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
+or `GitLab
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
+Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
+supported. Spack-specific, environment and user path variables can be used.
+(See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
+
-Environments can include files or URLs. File paths can be relative or
-absolute. URLs include the path to the text for individual files or
-can be the path to a directory containing configuration files.
-Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
-schemes). Spack-specific, environment and user path variables may be
-used in these paths. See :ref:`config-file-variables` for more information.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
```
51
lib/spack/docs/include_yaml.rst
Normal file
51
lib/spack/docs/include_yaml.rst
Normal file
@@ -0,0 +1,51 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _include-yaml:
+
+===============================
+Include Settings (include.yaml)
+===============================
+
+Spack allows you to include configuration files through ``include.yaml``.
+Using the ``include:`` heading results in pulling in external configuration
+information to be used by any Spack command.
+
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. For
+example,
+
+.. code-block:: yaml
+
+   include:
+   - /path/to/a/required/config.yaml
+   - path: /path/to/$os/$target/config
+     optional: true
+   - path: /path/to/os-specific/config-dir
+     when: os == "ventura"
+
+shows all three. The first entry, ``/path/to/a/required/config.yaml``,
+indicates that the included ``config.yaml`` file is required (so it must exist).
+Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
+the path is only included if it exists. The condition ``os == "ventura"``
+in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
+path is only included when the operating system (``os``) is ``ventura``.
+
+The same conditions and variables in `Spec List References
+<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
+can be used for conditional activation in the ``when`` clauses.
+
+Included files can be specified by path or by their parent directory.
+Paths may be absolute, relative (to the configuration file including the path),
+or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols
+(or schemes) are supported. Spack-specific, environment and user path variables
+can be used. (See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
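The effect behind the warning is easiest to see with a toy merge. The sketch below is an illustration of ordering only — a hypothetical include graph and a plain dict merge, not Spack's actual scope semantics:

```python
# Illustration only: later updates win in this toy merge.
def dfs_merge(name, includes, configs, result):
    """Apply each file's includes depth-first: children before siblings."""
    for child in includes.get(name, []):
        dfs_merge(child, includes, configs, result)
        result.update(configs.get(child, {}))
    return result

includes = {"top": ["a", "b"], "a": ["nested"]}          # a pulls in nested
configs = {"a": {}, "b": {"build_jobs": 4}, "nested": {"build_jobs": 16}}
print(dfs_merge("top", includes, configs, {}))           # {'build_jobs': 4}
# breadth-first processing would apply a, b, then nested, giving 16 --
# hence "may not be what you expect".
```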
@@ -71,6 +71,7 @@ or refer to the full manual below.
    configuration
    config_yaml
+   include_yaml
    packages_yaml
    build_settings
    environments

@@ -456,14 +456,13 @@ For instance, the following config options,
     tcl:
       all:
         suffixes:
-          ^python@3: 'python{^python.version}'
+          ^python@3: 'python{^python.version.up_to_2}'
           ^openblas: 'openblas'
 
-will add a ``python-3.12.1`` version string to any packages compiled with
-Python matching the spec, ``python@3``. This is useful to know which
-version of Python a set of Python extensions is associated with. Likewise, the
-``openblas`` string is attached to any program that has openblas in the spec,
-most likely via the ``+blas`` variant specification.
+will add ``python3.12`` to the module names of packages compiled with Python 3.12, and similarly
+for all specs depending on ``python@3``. This is useful to know which version of Python a set of
+Python extensions is associated with. Likewise, the ``openblas`` string is attached to any program
+that has openblas in the spec, most likely via the ``+blas`` variant specification.
 
 The most heavyweight solution to module naming is to change the entire
 naming convention for module files. This uses the projections format
@@ -820,6 +820,69 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
 build group on CDash called "Release Testing" (that group will be created if
 it didn't already exist).
 
+.. _ci_artifacts:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+CI Artifacts Directory Layout
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When running a CI build with the command ``spack ci rebuild``, a number of directories are created
+for storing data generated during the CI job. The default root directory for artifacts is
+``job_scratch_root``. This can be overridden by passing the argument ``--artifacts-root`` to the
+``spack ci generate`` command, or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in
+the build job scripts.
+
+The top-level directories under the artifact root are ``concrete_environment``, ``logs``,
+``reproduction``, ``tests``, and ``user_data``. Spack does not restrict what is written to any of
+these directories, nor does it require user-specified files to be written to any specific
+directory.
+
+------------------------
+``concrete_environment``
+------------------------
+
+The directory ``concrete_environment`` is used to communicate the ``spack.yaml`` processed by
+``spack ci generate`` and the concrete ``spack.lock`` for the CI environment.
+
+--------
+``logs``
+--------
+
+The directory ``logs`` contains the Spack build log, ``spack-build-out.txt``, and the Spack build
+environment modification file, ``spack-build-mod-env.txt``. Additionally, all files specified by
+the package's ``Builder`` property ``archive_files`` are also copied here (e.g.,
+``CMakeCache.txt`` for ``CMakeBuilder``).
+
+----------------
+``reproduction``
+----------------
+
+The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build``
+command. This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the
+concrete spec JSON file for the current spec being built, and all of the files written in the
+artifacts root directory.
+
+The ``repro.json`` file is not versioned and is only designed to work with the version of Spack
+that CI was run with. An example of what a ``repro.json`` may look like:
+
+.. code:: json
+
+   {
+     "job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
+     "job_spec_json": "adios2.json",
+     "ci_project_dir": "/builds/spack/spack"
+   }
+
+---------
+``tests``
+---------
+
+The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may
+or may not contain data, depending on the package that was built and the availability of tests.
+
+-------------
+``user_data``
+-------------
+
+The directory ``user_data`` is used to store everything else that shouldn't be copied to the
+``reproduction`` directory. Users may use this to store additional logs, metrics, or other types
+of files generated by the build job.
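For orientation, a build-job script might resolve these locations as below. This is a sketch based only on the defaults described above — ``SPACK_ARTIFACTS_ROOT``, ``job_scratch_root``, and the directory names come from the docs; the variable names are illustrative:

```python
import os

# default root per the docs; jobs may override it via SPACK_ARTIFACTS_ROOT
artifacts_root = os.environ.get("SPACK_ARTIFACTS_ROOT", "job_scratch_root")

logs_dir = os.path.join(artifacts_root, "logs")           # build logs, archive_files
repro_dir = os.path.join(artifacts_root, "reproduction")  # repro.json and friends
user_dir = os.path.join(artifacts_root, "user_data")      # anything else
```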
 
 -------------------------------------
 Using a custom spack in your pipeline
 -------------------------------------

@@ -1,13 +1,13 @@
-sphinx==8.1.3
+sphinx==8.2.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.26.1
+python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.4
+pytest==8.3.5
-isort==5.13.2
+isort==6.0.1
-black==24.10.0
+black==25.1.0
-flake8==7.1.1
+flake8==7.1.2
 mypy==1.11.1

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os.path
+import os
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit

@@ -7,6 +7,7 @@
 import fnmatch
 import glob
 import hashlib
+import io
 import itertools
 import numbers
 import os

@@ -20,6 +21,7 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
+    IO,
     Callable,
     Deque,
     Dict,

@@ -75,7 +77,6 @@
     "install_tree",
     "is_exe",
     "join_path",
-    "last_modification_time_recursive",
     "library_extensions",
     "mkdirp",
     "partition_path",

@@ -669,7 +670,7 @@ def copy(src, dest, _permissions=False):
         _permissions (bool): for internal use only
 
     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """

@@ -680,7 +681,7 @@ def copy(src, dest, _permissions=False):
 
     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)

@@ -711,7 +712,7 @@ def install(src, dest):
         dest (str): the destination file or directory
 
     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """

@@ -749,7 +750,7 @@ def copy_tree(
         _permissions (bool): for internal use only
 
     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:

@@ -763,7 +764,7 @@ def copy_tree(
 
     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))
 
     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all

@@ -844,7 +845,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         ignore (typing.Callable): function indicating which files to ignore
 
     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)

@@ -1470,15 +1471,36 @@ def set_executable(path):
 
 
 @system_path_filter
-def last_modification_time_recursive(path):
-    path = os.path.abspath(path)
-    times = [os.stat(path).st_mtime]
-    times.extend(
-        os.lstat(os.path.join(root, name)).st_mtime
-        for root, dirs, files in os.walk(path)
-        for name in dirs + files
-    )
-    return max(times)
+def recursive_mtime_greater_than(path: str, time: float) -> bool:
+    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
+    # use bfs order to increase likelihood of early return
+    queue: Deque[str] = collections.deque([path])
+
+    if os.stat(path).st_mtime > time:
+        return True
+
+    while queue:
+        current = queue.popleft()
+
+        try:
+            entries = os.scandir(current)
+        except OSError:
+            continue
+
+        with entries:
+            for entry in entries:
+                try:
+                    st = entry.stat(follow_symlinks=False)
+                except OSError:
+                    continue
+
+                if st.st_mtime > time:
+                    return True
+
+                if entry.is_dir(follow_symlinks=False):
+                    queue.append(entry.path)
+
+    return False
 
 
 @system_path_filter
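A sketch of how the new predicate might be used — the import path follows this diff, while the staleness scenario and the watched path are hypothetical. Unlike the removed function, it can return as soon as it finds a single newer entry:

```python
import time

from llnl.util.filesystem import recursive_mtime_greater_than

cache_written_at = time.time() - 3600  # e.g. when a cached index was generated
if recursive_mtime_greater_than("/path/to/watched/tree", cache_written_at):
    print("tree changed since the cache was written; regenerate")
```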
@@ -1740,8 +1762,7 @@ def find(
 
 
 def _log_file_access_issue(e: OSError, path: str) -> None:
-    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
-    tty.debug(f"find must skip {path}: {errno_name} {e}")
+    tty.debug(f"find must skip {path}: {e}")
 
 
 def _file_id(s: os.stat_result) -> Tuple[int, int]:

@@ -2435,26 +2456,69 @@ class WindowsSimulatedRPath:
     and vis versa.
     """
 
-    def __init__(self, package, link_install_prefix=True):
+    def __init__(
+        self,
+        package,
+        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
+        link_install_prefix: bool = True,
+    ):
         """
         Args:
             package (spack.package_base.PackageBase): Package requiring links
+            base_modification_prefix (str|pathlib.Path): Path representation indicating
+                the root directory in which to establish the simulated rpath, i.e. where the
+                symlinks that comprise the "rpath" behavior will be installed.
+
+                Note: This is a mutually exclusive option with `link_install_prefix`; using
+                both is an error.
+
+                Default: None
             link_install_prefix (bool): Link against package's own install or stage root.
                 Packages that run their own executables during build and require rpaths to
-                the build directory during build time require this option. Default: install
-                root
+                the build directory during build time require this option.
+
+                Default: install root
+
+                Note: This is a mutually exclusive option with `base_modification_prefix`; using
+                both is an error.
         """
         self.pkg = package
-        self._addl_rpaths = set()
+        self._addl_rpaths: set[str] = set()
+        if link_install_prefix and base_modification_prefix:
+            raise RuntimeError(
+                "Invalid combination of arguments given to WindowsSimulated RPath.\n"
+                "Select either `link_install_prefix` to create an install prefix rpath"
+                " or specify a `base_modification_prefix` for any other link type. "
+                "Specifying both arguments is invalid."
+            )
+        if not (link_install_prefix or base_modification_prefix):
+            raise RuntimeError(
+                "Insufficient arguments given to WindowsSimulatedRpath.\n"
+                "WindowsSimulatedRPath requires one of link_install_prefix"
+                " or base_modification_prefix to be specified."
+                " Neither was provided."
+            )
+
         self.link_install_prefix = link_install_prefix
-        self._additional_library_dependents = set()
+        if base_modification_prefix:
+            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
+        else:
+            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
+        self._additional_library_dependents: set[pathlib.Path] = set()
+        if not self.link_install_prefix:
+            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
 
     @property
     def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
+        base_pths = set()
+        if self.link_install_prefix:
+            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
+        base_pths |= self._additional_library_dependents
+        return base_pths
 
     def add_library_dependent(self, *dest):
         """
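Given the new mutual-exclusion checks, there are exactly two valid ways to construct the class. A sketch (``pkg`` stands for some hypothetical ``PackageBase`` instance; the sandbox path is illustrative):

```python
import pathlib

# 1) default: simulate the rpath inside the package's own install prefix
rpath = WindowsSimulatedRPath(pkg)

# 2) simulate it elsewhere (e.g. a test sandbox); the default
#    link_install_prefix=True must be disabled, or __init__ raises
rpath = WindowsSimulatedRPath(
    pkg, base_modification_prefix=pathlib.Path("C:/sandbox"), link_install_prefix=False
)
```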
@@ -2470,6 +2534,12 @@ def add_library_dependent(self, *dest):
                 new_pth = pathlib.Path(pth).parent
             else:
                 new_pth = pathlib.Path(pth)
+            path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
+            if not path_is_in_prefix:
+                raise RuntimeError(
+                    f"Attempting to generate rpath symlink out of rpath context:\
+{str(self.base_modification_prefix)}"
+                )
             self._additional_library_dependents.add(new_pth)
 
     @property

@@ -2558,6 +2628,33 @@ def establish_link(self):
             self._link(library, lib_dir)
 
 
+def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
+    """Establishes a temp Windows simulated rpath for the pkg in the testing directory
+    so an executable can test the libraries/executables with proper access
+    to dependent dlls
+
+    Note: this is a no-op on all other platforms besides Windows
+
+    Args:
+        pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
+        test_dir: the testing directory in which we should construct an rpath
+    """
+    # link_install_prefix as false ensures we're not linking into the install prefix
+    mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
+    # add the testing directory as a location to install rpath symlinks
+    mini_rpath.add_library_dependent(test_dir)
+
+    # check for whether build_directory is available, if not
+    # assume the stage root is the build dir
+    build_dir_attr = getattr(pkg, "build_directory", None)
+    build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
+    # add the build dir & build dir bin
+    mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
+    mini_rpath.add_rpath(os.path.join(build_directory))
+    # construct rpath
+    mini_rpath.establish_link()
+
+
 @system_path_filter
 @memoized
 def can_access_dir(path):
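A sketch of the intended call site, per the helper's docstring (``pkg`` and the test directory are hypothetical; the helper documents itself as a no-op off Windows):

```python
from llnl.util.filesystem import make_package_test_rpath

# before running standalone tests, mirror dependent DLLs into the test dir
make_package_test_rpath(pkg, test_dir=r"C:\spack\test-stage\pkg-tests")
```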
@@ -2786,6 +2883,20 @@ def keep_modification_time(*filenames):
         os.utime(f, (os.path.getatime(f), mtime))
 
 
+@contextmanager
+def temporary_file_position(stream):
+    orig_pos = stream.tell()
+    yield
+    stream.seek(orig_pos)
+
+
+@contextmanager
+def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
+    with temporary_file_position(stream):
+        stream.seek(loc, relative_to)
+        yield
+
+
 @contextmanager
 def temporary_dir(
     suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
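A sketch of the new context managers in use: peek elsewhere in an open stream and come back, with the caller's position restored on exit (assumes Spack's ``llnl`` package is importable; the file contents are illustrative):

```python
import io
import tempfile

from llnl.util.filesystem import current_file_position

with tempfile.TemporaryFile("w+", encoding="utf-8") as f:
    f.write("line one\nline two\n")
    f.seek(0)
    f.readline()                                    # consume "line one"
    with current_file_position(f, 0, io.SEEK_SET):  # temporarily rewind
        peeked = f.readline()                       # "line one" again
    assert f.readline() == "line two\n"             # position was restored
```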
@@ -11,10 +11,11 @@
 import re
 import sys
 import traceback
+import types
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
+from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
 
 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"

@@ -707,14 +708,24 @@ def __init__(self, wrapped_object):
 
 
 class Singleton:
-    """Simple wrapper for lazily initialized singleton objects."""
+    """Wrapper for lazily initialized singleton objects."""
 
-    def __init__(self, factory):
+    def __init__(self, factory: Callable[[], object]):
         """Create a new singleton to be inited with the factory function.
 
+        Most factories will simply create the object to be initialized and
+        return it.
+
+        In some cases, e.g. when bootstrapping some global state, the singleton
+        may need to be initialized incrementally. If the factory returns a generator
+        instead of a regular object, the singleton will assign each result yielded by
+        the generator to the singleton instance. This allows methods called by
+        the factory in later stages to refer back to the singleton.
+
         Args:
-            factory (function): function taking no arguments that
-                creates the singleton instance.
+            factory (function): function taking no arguments that creates the
+                singleton instance.
         """
         self.factory = factory
         self._instance = None

@@ -722,7 +733,16 @@ def __init__(self, factory):
     @property
     def instance(self):
         if self._instance is None:
-            self._instance = self.factory()
+            instance = self.factory()
+
+            if isinstance(instance, types.GeneratorType):
+                # if it's a generator, assign every value
+                for value in instance:
+                    self._instance = value
+            else:
+                # if not, just assign the result like a normal singleton
+                self._instance = instance
+
         return self._instance
 
     def __getattr__(self, name):
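A sketch of the new generator-factory behavior; the bootstrap scenario is hypothetical:

```python
from llnl.util.lang import Singleton

def bootstrap():
    state = {"phase": 1}
    yield state          # the singleton now refers to the partial state
    state["phase"] = 2   # later stages could already consult the singleton here
    yield state

CONFIG = Singleton(bootstrap)
print(CONFIG.instance["phase"])  # 2 -- the generator is fully drained on first access
```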
@@ -1080,3 +1100,88 @@ def __set__(self, instance, value):
 
     def factory(self, instance, owner):
         raise NotImplementedError("must be implemented by derived classes")
+
+
+KT = TypeVar("KT")
+VT = TypeVar("VT")
+
+
+class PriorityOrderedMapping(Mapping[KT, VT]):
+    """Mapping that iterates over keys according to an integer priority. If the priority is
+    the same for two keys, insertion order is what matters.
+
+    The priority is set when the key/value pair is added. If not set, the highest current priority
+    is used.
+    """
+
+    _data: Dict[KT, VT]
+    _priorities: List[Tuple[int, KT]]
+
+    def __init__(self) -> None:
+        self._data = {}
+        # Tuple of (priority, key)
+        self._priorities = []
+
+    def __getitem__(self, key: KT) -> VT:
+        return self._data[key]
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    def __iter__(self):
+        yield from (key for _, key in self._priorities)
+
+    def __reversed__(self):
+        yield from (key for _, key in reversed(self._priorities))
+
+    def reversed_keys(self):
+        """Iterates over keys from the highest priority, to the lowest."""
+        return reversed(self)
+
+    def reversed_values(self):
+        """Iterates over values from the highest priority, to the lowest."""
+        yield from (self._data[key] for _, key in reversed(self._priorities))
+
+    def _highest_priority(self) -> int:
+        if not self._priorities:
+            return 0
+        result, _ = self._priorities[-1]
+        return result
+
+    def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
+        """Adds a key/value pair to the mapping, with a specific priority.
+
+        If the priority is None, then it is assumed to be the highest priority value currently
+        in the container.
+
+        Raises:
+            ValueError: when the same priority is already in the mapping
+        """
+        if priority is None:
+            priority = self._highest_priority()
+
+        if key in self._data:
+            self.remove(key)
+
+        self._priorities.append((priority, key))
+        # We rely on sort being stable
+        self._priorities.sort(key=lambda x: x[0])
+        self._data[key] = value
+        assert len(self._data) == len(self._priorities)
+
+    def remove(self, key: KT) -> VT:
+        """Removes a key from the mapping.
+
+        Returns:
+            The value associated with the key being removed
+
+        Raises:
+            KeyError: if the key is not in the mapping
+        """
+        if key not in self._data:
+            raise KeyError(f"cannot find {key}")
+
+        popped_item = self._data.pop(key)
+        self._priorities = [(p, k) for p, k in self._priorities if k != key]
+        assert len(self._data) == len(self._priorities)
+        return popped_item
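Usage sketch: iteration order follows priority, then insertion order for ties; omitting ``priority`` reuses the current highest:

```python
from llnl.util.lang import PriorityOrderedMapping

m = PriorityOrderedMapping()
m.add("defaults", value="lowest", priority=0)
m.add("site", value="mid", priority=10)
m.add("user", value="top")        # no priority: reuses the current highest (10)

print(list(m))                    # ['defaults', 'site', 'user']
print(list(m.reversed_keys()))    # ['user', 'site', 'defaults']
```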
@@ -41,6 +41,16 @@ def __init__(self, dst, src_a=None, src_b=None):
         self.src_a = src_a
         self.src_b = src_b
 
+    def __repr__(self) -> str:
+        return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
+
+
+def _samefile(a: str, b: str):
+    try:
+        return os.path.samefile(a, b)
+    except OSError:
+        return False
+
+
 class SourceMergeVisitor(BaseDirectoryVisitor):
     """

@@ -50,9 +60,14 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
     - A list of merge conflicts in dst/
     """
 
-    def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
+    def __init__(
+        self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
+    ):
         self.ignore = ignore if ignore is not None else lambda f: False
 
+        # On case-insensitive filesystems, normalize paths to detect duplications
+        self.normalize_paths = normalize_paths
+
         # When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
         # bit to the relative path in the destination dir.
         self.projection: str = ""

@@ -71,10 +86,88 @@ def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
         # and can run mkdir in order.
         self.directories: Dict[str, Tuple[str, str]] = {}
 
+        # If the visitor is configured to normalize paths, keep a map of
+        # normalized path to: original path, root directory + relative path
+        self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
+
         # Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
         # are guaranteed to be grouped by src_root in the order they were visited.
         self.files: Dict[str, Tuple[str, str]] = {}
 
+        # If the visitor is configured to normalize paths, keep a map of
+        # normalized path to: original path, root directory + relative path
+        self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
+
+    def _in_directories(self, proj_rel_path: str) -> bool:
+        """
+        Check if a path is already in the directory list
+        """
+        if self.normalize_paths:
+            return proj_rel_path.lower() in self._directories_normalized
+        else:
+            return proj_rel_path in self.directories
+
+    def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
+        """
+        Get the directory that is mapped to a path
+        """
+        if self.normalize_paths:
+            return self._directories_normalized[proj_rel_path.lower()]
+        else:
+            return (proj_rel_path, *self.directories[proj_rel_path])
+
+    def _del_directory(self, proj_rel_path: str):
+        """
+        Remove a directory from the list of directories
+        """
+        del self.directories[proj_rel_path]
+        if self.normalize_paths:
+            del self._directories_normalized[proj_rel_path.lower()]
+
+    def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
+        """
+        Add a directory to the list of directories.
+        Also stores the normalized version for later lookups
+        """
+        self.directories[proj_rel_path] = (root, rel_path)
+        if self.normalize_paths:
+            self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
+
+    def _in_files(self, proj_rel_path: str) -> bool:
+        """
+        Check if a path is already in the files list
+        """
+        if self.normalize_paths:
+            return proj_rel_path.lower() in self._files_normalized
+        else:
+            return proj_rel_path in self.files
+
+    def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
+        """
+        Get the file that is mapped to a path
+        """
+        if self.normalize_paths:
+            return self._files_normalized[proj_rel_path.lower()]
+        else:
+            return (proj_rel_path, *self.files[proj_rel_path])
+
+    def _del_file(self, proj_rel_path: str):
+        """
+        Remove a file from the list of files
+        """
+        del self.files[proj_rel_path]
+        if self.normalize_paths:
+            del self._files_normalized[proj_rel_path.lower()]
+
+    def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
+        """
+        Add a file to the list of files
+        Also stores the normalized version for later lookups
+        """
+        self.files[proj_rel_path] = (root, rel_path)
+        if self.normalize_paths:
+            self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
+
     def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
         """
         Register a directory if dst / rel_path is not blocked by a file or ignored.
|
|||||||
if self.ignore(rel_path):
|
if self.ignore(rel_path):
|
||||||
# Don't recurse when dir is ignored.
|
# Don't recurse when dir is ignored.
|
||||||
return False
|
return False
|
||||||
elif proj_rel_path in self.files:
|
elif self._in_files(proj_rel_path):
|
||||||
# Can't create a dir where a file is.
|
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
|
||||||
src_a_root, src_a_relpath = self.files[proj_rel_path]
|
src_a = os.path.join(*self._file(proj_rel_path))
|
||||||
self.fatal_conflicts.append(
|
src_b = os.path.join(root, rel_path)
|
||||||
MergeConflict(
|
|
||||||
dst=proj_rel_path,
|
if not _samefile(src_a, src_b):
|
||||||
src_a=os.path.join(src_a_root, src_a_relpath),
|
self.fatal_conflicts.append(
|
||||||
src_b=os.path.join(root, rel_path),
|
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||||
)
|
)
|
||||||
)
|
return False
|
||||||
return False
|
|
||||||
elif proj_rel_path in self.directories:
|
# Remove the link in favor of the dir.
|
||||||
|
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
|
||||||
|
self._del_file(existing_proj_rel_path)
|
||||||
|
self._add_directory(proj_rel_path, root, rel_path)
|
||||||
|
return True
|
||||||
|
elif self._in_directories(proj_rel_path):
|
||||||
# No new directory, carry on.
|
# No new directory, carry on.
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
# Register new directory.
|
# Register new directory.
|
||||||
self.directories[proj_rel_path] = (root, rel_path)
|
self._add_directory(proj_rel_path, root, rel_path)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||||
@@ -132,7 +230,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
|||||||
if handle_as_dir:
|
if handle_as_dir:
|
||||||
return self.before_visit_dir(root, rel_path, depth)
|
return self.before_visit_dir(root, rel_path, depth)
|
||||||
|
|
||||||
self.visit_file(root, rel_path, depth)
|
self.visit_file(root, rel_path, depth, symlink=True)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
|
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
|
||||||
@@ -140,30 +238,23 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
|
|||||||
|
|
||||||
if self.ignore(rel_path):
|
if self.ignore(rel_path):
|
||||||
pass
|
pass
|
||||||
elif proj_rel_path in self.directories:
|
elif self._in_directories(proj_rel_path):
|
||||||
# Can't create a file where a dir is; fatal error
|
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
|
||||||
self.fatal_conflicts.append(
|
# in which case we simply drop the symlink in favor of the actual dir.
|
||||||
MergeConflict(
|
src_a = os.path.join(*self._directory(proj_rel_path))
|
||||||
dst=proj_rel_path,
|
src_b = os.path.join(root, rel_path)
|
||||||
src_a=os.path.join(*self.directories[proj_rel_path]),
|
if not symlink or not _samefile(src_a, src_b):
|
||||||
src_b=os.path.join(root, rel_path),
|
self.fatal_conflicts.append(
|
||||||
|
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||||
)
|
)
|
||||||
)
|
elif self._in_files(proj_rel_path):
|
||||||
elif proj_rel_path in self.files:
|
|
||||||
# When two files project to the same path, they conflict iff they are distinct.
|
# When two files project to the same path, they conflict iff they are distinct.
|
||||||
# If they are the same (i.e. one links to the other), register regular files rather
|
# If they are the same (i.e. one links to the other), register regular files rather
|
||||||
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
|
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
|
||||||
# file, not the symlink.
|
# file, not the symlink.
|
||||||
|
src_a = os.path.join(*self._file(proj_rel_path))
|
||||||
src_a = os.path.join(*self.files[proj_rel_path])
|
|
||||||
src_b = os.path.join(root, rel_path)
|
src_b = os.path.join(root, rel_path)
|
||||||
|
if not _samefile(src_a, src_b):
|
||||||
try:
|
|
||||||
samefile = os.path.samefile(src_a, src_b)
|
|
||||||
except OSError:
|
|
||||||
samefile = False
|
|
||||||
|
|
||||||
if not samefile:
|
|
||||||
# Distinct files produce a conflict.
|
# Distinct files produce a conflict.
|
||||||
self.file_conflicts.append(
|
self.file_conflicts.append(
|
||||||
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||||
@@ -173,12 +264,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
|
|||||||
if not symlink:
|
if not symlink:
|
||||||
# Remove the link in favor of the actual file. The del is necessary to maintain the
|
# Remove the link in favor of the actual file. The del is necessary to maintain the
|
||||||
# order of the files dict, which is grouped by root.
|
# order of the files dict, which is grouped by root.
|
||||||
del self.files[proj_rel_path]
|
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
|
||||||
self.files[proj_rel_path] = (root, rel_path)
|
self._del_file(existing_proj_rel_path)
|
||||||
|
self._add_file(proj_rel_path, root, rel_path)
|
||||||
else:
|
else:
|
||||||
# Otherwise register this file to be linked.
|
# Otherwise register this file to be linked.
|
||||||
self.files[proj_rel_path] = (root, rel_path)
|
self._add_file(proj_rel_path, root, rel_path)
|
||||||
|
|
||||||
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
|
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||||
# Treat symlinked files as ordinary files (without "dereferencing")
|
# Treat symlinked files as ordinary files (without "dereferencing")
|
||||||
@@ -197,11 +288,11 @@ def set_projection(self, projection: str) -> None:
|
|||||||
path = ""
|
path = ""
|
||||||
for part in self.projection.split(os.sep):
|
for part in self.projection.split(os.sep):
|
||||||
path = os.path.join(path, part)
|
path = os.path.join(path, part)
|
||||||
if path not in self.files:
|
if not self._in_files(path):
|
||||||
self.directories[path] = ("<projection>", path)
|
self._add_directory(path, "<projection>", path)
|
||||||
else:
|
else:
|
||||||
# Can't create a dir where a file is.
|
# Can't create a dir where a file is.
|
||||||
src_a_root, src_a_relpath = self.files[path]
|
_, src_a_root, src_a_relpath = self._file(path)
|
||||||
self.fatal_conflicts.append(
|
self.fatal_conflicts.append(
|
||||||
MergeConflict(
|
MergeConflict(
|
||||||
dst=path,
|
dst=path,
|
||||||
@@ -227,8 +318,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
|
|||||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||||
# If destination dir is a file in a src dir, add a conflict,
|
# If destination dir is a file in a src dir, add a conflict,
|
||||||
# and don't traverse deeper
|
# and don't traverse deeper
|
||||||
if rel_path in self.src.files:
|
if self.src._in_files(rel_path):
|
||||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
_, src_a_root, src_a_relpath = self.src._file(rel_path)
|
||||||
self.src.fatal_conflicts.append(
|
self.src.fatal_conflicts.append(
|
||||||
MergeConflict(
|
MergeConflict(
|
||||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||||
@@ -238,8 +329,9 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
|||||||
|
|
||||||
# If destination dir was also a src dir, remove the mkdir
|
# If destination dir was also a src dir, remove the mkdir
|
||||||
# action, and traverse deeper.
|
# action, and traverse deeper.
|
||||||
if rel_path in self.src.directories:
|
if self.src._in_directories(rel_path):
|
||||||
del self.src.directories[rel_path]
|
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
|
||||||
|
self.src._del_directory(existing_proj_rel_path)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# If the destination dir does not appear in the src dir,
|
# If the destination dir does not appear in the src dir,
|
||||||
@@ -252,38 +344,24 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
|||||||
be seen as files; we should not accidentally merge
|
be seen as files; we should not accidentally merge
|
||||||
source dir with a symlinked dest dir.
|
source dir with a symlinked dest dir.
|
||||||
"""
|
"""
|
||||||
# Always conflict
|
|
||||||
if rel_path in self.src.directories:
|
|
||||||
src_a_root, src_a_relpath = self.src.directories[rel_path]
|
|
||||||
self.src.fatal_conflicts.append(
|
|
||||||
MergeConflict(
|
|
||||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if rel_path in self.src.files:
|
self.visit_file(root, rel_path, depth)
|
||||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
|
||||||
self.src.fatal_conflicts.append(
|
|
||||||
MergeConflict(
|
|
||||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Never descend into symlinked target dirs.
|
# Never descend into symlinked target dirs.
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||||
# Can't merge a file if target already exists
|
# Can't merge a file if target already exists
|
||||||
if rel_path in self.src.directories:
|
if self.src._in_directories(rel_path):
|
||||||
src_a_root, src_a_relpath = self.src.directories[rel_path]
|
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
|
||||||
self.src.fatal_conflicts.append(
|
self.src.fatal_conflicts.append(
|
||||||
MergeConflict(
|
MergeConflict(
|
||||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
elif rel_path in self.src.files:
|
elif self.src._in_files(rel_path):
|
||||||
src_a_root, src_a_relpath = self.src.files[rel_path]
|
_, src_a_root, src_a_relpath = self.src._file(rel_path)
|
||||||
self.src.fatal_conflicts.append(
|
self.src.fatal_conflicts.append(
|
||||||
MergeConflict(
|
MergeConflict(
|
||||||
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
|
||||||
@@ -308,7 +386,7 @@ class LinkTree:
|
|||||||
|
|
||||||
def __init__(self, source_root):
|
def __init__(self, source_root):
|
||||||
if not os.path.exists(source_root):
|
if not os.path.exists(source_root):
|
||||||
raise IOError("No such file or directory: '%s'", source_root)
|
raise OSError("No such file or directory: '%s'", source_root)
|
||||||
|
|
||||||
self._root = source_root
|
self._root = source_root
|
||||||
|
|
||||||
|
@@ -269,7 +269,7 @@ def __init__(
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _poll_interval_generator(
|
def _poll_interval_generator(
|
||||||
_wait_times: Optional[Tuple[float, float, float]] = None
|
_wait_times: Optional[Tuple[float, float, float]] = None,
|
||||||
) -> Generator[float, None, None]:
|
) -> Generator[float, None, None]:
|
||||||
"""This implements a backoff scheme for polling a contended resource
|
"""This implements a backoff scheme for polling a contended resource
|
||||||
by suggesting a succession of wait times between polls.
|
by suggesting a succession of wait times between polls.
|
||||||
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except IOError as e:
|
except OSError as e:
|
||||||
# EAGAIN and EACCES == locked by another process (so try again)
|
# EAGAIN and EACCES == locked by another process (so try again)
|
||||||
if e.errno not in (errno.EAGAIN, errno.EACCES):
|
if e.errno not in (errno.EAGAIN, errno.EACCES):
|
||||||
raise
|
raise
|
||||||
|
@@ -2,8 +2,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
"""Utility classes for logging the output of blocks of code.
|
"""Utility classes for logging the output of blocks of code."""
|
||||||
"""
|
|
||||||
import atexit
|
import atexit
|
||||||
import ctypes
|
import ctypes
|
||||||
import errno
|
import errno
|
||||||
@@ -344,26 +343,6 @@ def close(self):
|
|||||||
self.file.close()
|
self.file.close()
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def replace_environment(env):
|
|
||||||
"""Replace the current environment (`os.environ`) with `env`.
|
|
||||||
|
|
||||||
If `env` is empty (or None), this unsets all current environment
|
|
||||||
variables.
|
|
||||||
"""
|
|
||||||
env = env or {}
|
|
||||||
old_env = os.environ.copy()
|
|
||||||
try:
|
|
||||||
os.environ.clear()
|
|
||||||
for name, val in env.items():
|
|
||||||
os.environ[name] = val
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
os.environ.clear()
|
|
||||||
for name, val in old_env.items():
|
|
||||||
os.environ[name] = val
|
|
||||||
|
|
||||||
|
|
||||||
def log_output(*args, **kwargs):
|
def log_output(*args, **kwargs):
|
||||||
"""Context manager that logs its output to a file.
|
"""Context manager that logs its output to a file.
|
||||||
|
|
||||||
@@ -447,7 +426,6 @@ def __init__(
|
|||||||
self.echo = echo
|
self.echo = echo
|
||||||
self.debug = debug
|
self.debug = debug
|
||||||
self.buffer = buffer
|
self.buffer = buffer
|
||||||
self.env = env # the environment to use for _writer_daemon
|
|
||||||
self.filter_fn = filter_fn
|
self.filter_fn = filter_fn
|
||||||
|
|
||||||
self._active = False # used to prevent re-entry
|
self._active = False # used to prevent re-entry
|
||||||
@@ -519,21 +497,20 @@ def __enter__(self):
|
|||||||
# just don't forward input if this fails
|
# just don't forward input if this fails
|
||||||
pass
|
pass
|
||||||
|
|
||||||
with replace_environment(self.env):
|
self.process = multiprocessing.Process(
|
||||||
self.process = multiprocessing.Process(
|
target=_writer_daemon,
|
||||||
target=_writer_daemon,
|
args=(
|
||||||
args=(
|
input_fd,
|
||||||
input_fd,
|
read_fd,
|
||||||
read_fd,
|
self.write_fd,
|
||||||
self.write_fd,
|
self.echo,
|
||||||
self.echo,
|
self.log_file,
|
||||||
self.log_file,
|
child_pipe,
|
||||||
child_pipe,
|
self.filter_fn,
|
||||||
self.filter_fn,
|
),
|
||||||
),
|
)
|
||||||
)
|
self.process.daemon = True # must set before start()
|
||||||
self.process.daemon = True # must set before start()
|
self.process.start()
|
||||||
self.process.start()
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
if input_fd:
|
if input_fd:
|
||||||
@@ -729,10 +706,7 @@ class winlog:
|
|||||||
Does not support the use of 'v' toggling as nixlog does.
|
Does not support the use of 'v' toggling as nixlog does.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
|
||||||
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
|
|
||||||
):
|
|
||||||
self.env = env
|
|
||||||
self.debug = debug
|
self.debug = debug
|
||||||
self.echo = echo
|
self.echo = echo
|
||||||
self.logfile = file_like
|
self.logfile = file_like
|
||||||
@@ -789,11 +763,10 @@ def background_reader(reader, echo_writer, _kill):
|
|||||||
reader.close()
|
reader.close()
|
||||||
|
|
||||||
self._active = True
|
self._active = True
|
||||||
with replace_environment(self.env):
|
self._thread = Thread(
|
||||||
self._thread = Thread(
|
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
|
||||||
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
|
)
|
||||||
)
|
self._thread.start()
|
||||||
self._thread.start()
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
@@ -918,7 +891,7 @@ def _writer_daemon(
|
|||||||
try:
|
try:
|
||||||
if stdin_file.read(1) == "v":
|
if stdin_file.read(1) == "v":
|
||||||
echo = not echo
|
echo = not echo
|
||||||
except IOError as e:
|
except OSError as e:
|
||||||
# If SIGTTIN is ignored, the system gives EIO
|
# If SIGTTIN is ignored, the system gives EIO
|
||||||
# to let the caller know the read failed b/c it
|
# to let the caller know the read failed b/c it
|
||||||
# was in the bg. Ignore that too.
|
# was in the bg. Ignore that too.
|
||||||
@@ -1013,7 +986,7 @@ def wrapped(*args, **kwargs):
|
|||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
return function(*args, **kwargs)
|
return function(*args, **kwargs)
|
||||||
except IOError as e:
|
except OSError as e:
|
||||||
if e.errno == errno.EINTR:
|
if e.errno == errno.EINTR:
|
||||||
continue
|
continue
|
||||||
raise
|
raise
|
||||||
|
@@ -10,9 +10,21 @@
|
|||||||
import spack.util.git
|
import spack.util.git
|
||||||
|
|
||||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||||
__version__ = "0.24.0.dev0"
|
__version__ = "1.0.0.dev0"
|
||||||
spack_version = __version__
|
spack_version = __version__
|
||||||
|
|
||||||
|
#: The current Package API version implemented by this version of Spack. The Package API defines
|
||||||
|
#: the Python interface for packages as well as the layout of package repositories. The minor
|
||||||
|
#: version is incremented when the package API is extended in a backwards-compatible way. The major
|
||||||
|
#: version is incremented upon breaking changes. This version is changed independently from the
|
||||||
|
#: Spack version.
|
||||||
|
package_api_version = (1, 0)
|
||||||
|
|
||||||
|
#: The minimum Package API version that this version of Spack is compatible with. This should
|
||||||
|
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
|
||||||
|
#: compatibility with vX.0.
|
||||||
|
min_package_api_version = (1, 0)
|
||||||
|
|
||||||
|
|
||||||
def __try_int(v):
|
def __try_int(v):
|
||||||
try:
|
try:
|
||||||
@@ -79,4 +91,6 @@ def get_short_version() -> str:
|
|||||||
"get_version",
|
"get_version",
|
||||||
"get_spack_commit",
|
"get_spack_commit",
|
||||||
"get_short_version",
|
"get_short_version",
|
||||||
|
"package_api_version",
|
||||||
|
"min_package_api_version",
|
||||||
]
|
]
|
||||||
|
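A sketch — not Spack's actual validation logic — of how a repository's declared API version could be checked against the supported range exported above; tuples compare lexicographically, so the range check is a plain comparison:

```python
from typing import Tuple

import spack

def repo_api_supported(repo_api: Tuple[int, int]) -> bool:
    # e.g. (1, 0) is in range; (2, 0) would require a newer Spack
    return spack.min_package_api_version <= repo_api <= spack.package_api_version

print(repo_api_supported((1, 0)))  # True for this release
```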
@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
     for dep_name, dep in deps_by_name.items():
 
         def check_virtual_with_variants(spec, msg):
-            if not spec.virtual or not spec.variants:
+            if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
                 return
             error = error_cls(
                 f"{pkg_name}: {msg}",

@@ -1356,14 +1356,8 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
 
     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
-        # Check items are of the same type
-        if not isinstance(_detected, type(_expected)):
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]
-
         # If they are string expected is a regex
-        if isinstance(_expected, str):
+        if isinstance(_expected, str) and isinstance(_detected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:

@@ -1379,7 +1373,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
             _details = [f"{_detected} does not match the regex"]
             return [error_cls(summary=_summary, details=_details)]
 
-        if isinstance(_expected, dict):
+        elif isinstance(_expected, dict) and isinstance(_detected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"

@@ -1394,6 +1388,10 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                     result.extend(
                         _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                     )
+        else:
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]
 
         return result
|
|||||||
import codecs
|
import codecs
|
||||||
import collections
|
import collections
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
|
import contextlib
|
||||||
import copy
|
import copy
|
||||||
import hashlib
|
import hashlib
|
||||||
import io
|
import io
|
||||||
@@ -23,7 +24,7 @@
|
|||||||
import urllib.request
|
import urllib.request
|
||||||
import warnings
|
import warnings
|
||||||
from contextlib import closing
|
from contextlib import closing
|
||||||
from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
||||||
|
|
||||||
import llnl.util.filesystem as fsys
|
import llnl.util.filesystem as fsys
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
@@ -91,6 +92,9 @@
|
|||||||
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
|
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
|
||||||
|
|
||||||
|
|
||||||
|
INDEX_HASH_FILE = "index.json.hash"
|
||||||
|
|
||||||
|
|
||||||
class BuildCacheDatabase(spack_db.Database):
|
class BuildCacheDatabase(spack_db.Database):
|
||||||
"""A database for binary buildcaches.
|
"""A database for binary buildcaches.
|
||||||
|
|
||||||
@@ -502,7 +506,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
|
|||||||
scheme = urllib.parse.urlparse(mirror_url).scheme
|
scheme = urllib.parse.urlparse(mirror_url).scheme
|
||||||
|
|
||||||
if scheme != "oci" and not web_util.url_exists(
|
if scheme != "oci" and not web_util.url_exists(
|
||||||
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
|
||||||
):
|
):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -669,19 +673,24 @@ def sign_specfile(key: str, specfile_path: str) -> str:


 def _read_specs_and_push_index(
-    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
+    file_list: List[str],
+    read_method: Callable,
+    cache_prefix: str,
+    db: BuildCacheDatabase,
+    temp_dir: str,
+    concurrency: int,
 ):
    """Read all the specs listed in the provided list, using thread given thread parallelism,
    generate the index, and push it to the mirror.

    Args:
-        file_list (list(str)): List of urls or file paths pointing at spec files to read
+        file_list: List of urls or file paths pointing at spec files to read
        read_method: A function taking a single argument, either a url or a file path,
            and which reads the spec file at that location, and returns the spec.
-        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
+        cache_prefix: prefix of the build cache on s3 where index should be pushed.
        db: A spack database used for adding specs and then writing the index.
-        temp_dir (str): Location to write index.json and hash for pushing
-        concurrency (int): Number of parallel processes to use when fetching
+        temp_dir: Location to write index.json and hash for pushing
+        concurrency: Number of parallel processes to use when fetching
    """
    for file in file_list:
        contents = read_method(file)
@@ -699,7 +708,7 @@ def _read_specs_and_push_index(

    # Now generate the index, compute its hash, and push the two files to
    # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
    with open(index_json_path, "w", encoding="utf-8") as f:
        db._write_to_file(f)

@@ -709,14 +718,14 @@ def _read_specs_and_push_index(
    index_hash = compute_hash(index_string)

    # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
    with open(index_hash_path, "w", encoding="utf-8") as f:
        f.write(index_hash)

    # Push the index itself
    web_util.push_to_url(
        index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
        keep_original=False,
        extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
    )
@@ -724,7 +733,7 @@ def _read_specs_and_push_index(
    # Push the hash
    web_util.push_to_url(
        index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
        keep_original=False,
        extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
    )
@@ -793,7 +802,7 @@ def url_read_method(url):
        try:
            _, _, spec_file = web_util.read_from_url(url)
            contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
+        except (web_util.SpackWebError, OSError) as e:
            tty.error(f"Error reading specfile: {url}: {e}")
        return contents

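The broadened `except` clauses here and throughout this diff lean on the standard exception hierarchy; a quick plain-Python check (no Spack imports) of the facts they rely on:

```python
import urllib.error

# URLError and HTTPError derive from OSError, and TimeoutError has been an
# OSError subclass since Python 3.3, so a single OSError clause covers the
# network-level failures that were previously listed individually.
assert issubclass(urllib.error.URLError, OSError)
assert issubclass(urllib.error.HTTPError, urllib.error.URLError)
assert issubclass(TimeoutError, OSError)
```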
@@ -861,9 +870,12 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
    tty.debug(f"Retrieving spec descriptor files from {url} to build index")

    db = BuildCacheDatabase(tmpdir)
+    db._write()

    try:
-        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
+        _read_specs_and_push_index(
+            file_list, read_fn, url, db, str(db.database_directory), concurrency
+        )
    except Exception as e:
        raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e

@@ -911,7 +923,7 @@ class FileTypes:
    UNKNOWN = 2


-NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
+NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")


 def file_type(f: IO[bytes]) -> int:
@@ -1777,7 +1789,7 @@ def _oci_update_index(
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
    with open(index_json_path, "w", encoding="utf-8") as f:
        db._write_to_file(f)

@@ -1998,7 +2010,7 @@ def fetch_url_to_mirror(url):

            # Download the config = spec.json and the relevant tarball
            try:
-                manifest = json.loads(response.read())
+                manifest = json.load(response)
                spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                tarball_digest = spack.oci.image.Digest.from_string(
                    manifest["layers"][-1]["digest"]
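`json.load` accepts any object with a `.read()` method, so the response body no longer has to be materialized as a separate bytes object before parsing; a small illustration with an in-memory stream standing in for the HTTP response:

```python
import io
import json

# A BytesIO stands in for the HTTP response object here.
fake_response = io.BytesIO(b'{"config": {"digest": "sha256:abc"}}')
manifest = json.load(fake_response)
assert manifest["config"]["digest"] == "sha256:abc"
```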
@@ -2125,10 +2137,9 @@ def fetch_url_to_mirror(url):


 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did
-    not dedupe hardlinks. De-duping hardlinks is necessary
-    when relocating files in parallel and in-place. This
-    means we must preserve inodes when relocating."""
+    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
+    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
+    when relocating."""

    # New archives don't need this.
    if buildinfo.get("hardlinks_deduped", False):
@@ -2157,69 +2168,47 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
        buildinfo[key] = new_list


-def relocate_package(spec):
-    """
-    Relocate the given package
-    """
-    workdir = str(spec.prefix)
-    buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.STORE.layout.root)
-    new_prefix = str(spec.prefix)
-    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
-    new_spack_prefix = str(spack.paths.prefix)
-
-    old_sbang_install_path = None
-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+def relocate_package(spec: spack.spec.Spec) -> None:
+    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
+    spec_prefix = str(spec.prefix)
+    buildinfo = read_buildinfo_file(spec_prefix)
    old_layout_root = str(buildinfo["buildpath"])
-    old_spack_prefix = str(buildinfo.get("spackprefix"))
-    old_rel_prefix = buildinfo.get("relative_prefix")
-    old_prefix = os.path.join(old_layout_root, old_rel_prefix)

-    # Warn about old style tarballs created with the now removed --rel flag.
+    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
    if buildinfo.get("relative_rpaths", False):
        tty.warn(
-            f"Tarball for {spec} uses relative rpaths, " "which can cause library loading issues."
+            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
        )

-    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
-    # were not unique.
+    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
+    # prefixes were not unique.
    if "hash_to_prefix" in buildinfo:
        hash_to_old_prefix = buildinfo["hash_to_prefix"]
    elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
+        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
    else:
-        hash_to_old_prefix = dict()
-
-    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
-        msg = "Package tarball was created from an install "
-        msg += "prefix with a different directory layout and an older "
-        msg += "buildcache create implementation. It cannot be relocated."
-        raise NewLayoutException(msg)
-
-    # Spurious replacements (e.g. sbang) will cause issues with binaries
-    # For example, the new sbang can be longer than the old one.
-    # Hence 2 dictionaries are maintained here.
-    prefix_to_prefix_text = collections.OrderedDict()
-    prefix_to_prefix_bin = collections.OrderedDict()
-
-    if old_sbang_install_path:
-        install_path = spack.hooks.sbang.sbang_install_path()
-        prefix_to_prefix_text[old_sbang_install_path] = install_path
-
-    # First match specific prefix paths. Possibly the *local* install prefix
-    # of some dependency is in an upstream, so we cannot assume the original
-    # spack store root can be mapped uniformly to the new spack store root.
-    #
-    # If the spec is spliced, we need to handle the simultaneous mapping
-    # from the old install_tree to the new install_tree and from the build_spec
-    # to the spliced spec.
-    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
-    # by checking for spliced-in nodes by checking for nodes not in the build_spec
-    # without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
-    # in the context of the relevant root spec. This ensures that the analog for a spec s
-    # is the spec that s replaced when we spliced.
+        raise NewLayoutException(
+            "Package tarball was created from an install prefix with a different directory layout "
+            "and an older buildcache create implementation. It cannot be relocated."
+        )
+
+    prefix_to_prefix: Dict[str, str] = {}
+
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
+
+    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
+    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
+    # the new spack store root.
+
+    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
+    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
+    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
+    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
+    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
+    # that s replaced when we spliced.
    relocation_specs = specs_to_relocate(spec)
    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
    for s in relocation_specs:
@@ -2239,72 +2228,66 @@ def relocate_package(spec):
            lookup_dag_hash = analog.dag_hash()
        if lookup_dag_hash in hash_to_old_prefix:
            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
-            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix[old_dep_prefix] = str(s.prefix)

    # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix_text[old_prefix] = new_prefix
-    prefix_to_prefix_bin[old_prefix] = new_prefix
-    prefix_to_prefix_text[old_layout_root] = new_layout_root
-    prefix_to_prefix_bin[old_layout_root] = new_layout_root
-
-    # This is vestigial code for the *old* location of sbang. Previously,
-    # sbang was a bash script, and it lived in the spack prefix. It is
-    # now a POSIX script that lives in the install prefix. Old packages
-    # will have the old sbang location in their shebangs.
-    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
-    new_sbang = spack.hooks.sbang.sbang_shebang_line()
-    prefix_to_prefix_text[orig_sbang] = new_sbang
-
-    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
+    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
+
+    # Delete identity mappings from prefix_to_prefix
+    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
+
+    # If there's nothing to relocate, we're done.
+    if not prefix_to_prefix:
+        return
+
+    for old, new in prefix_to_prefix.items():
+        tty.debug(f"Relocating: {old} => {new}.")

    # Old archives may have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(workdir, buildinfo)
+    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)

    # Text files containing the prefix text
-    text_names = [os.path.join(workdir, f) for f in buildinfo["relocate_textfiles"]]
-
-    # If we are not installing back to the same install tree do the relocation
-    if old_prefix != new_prefix:
-        files_to_relocate = [
-            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
-        ]
-        # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
-        platform = spack.platforms.by_name(spec.platform)
-        if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(files_to_relocate, prefix_to_prefix_bin)
-        elif "elf" in platform.binary_formats:
-            # The new ELF dynamic section relocation logic only handles absolute to
-            # absolute relocation.
-            relocate.relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
-
-        # Relocate links to the new install prefix
-        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
-        relocate.relocate_links(links, prefix_to_prefix_bin)
-
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        # relocate the install prefixes in binary files including dependencies
-        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
-
-        # Add ad-hoc signatures to patched macho files when on macOS.
-        if "macho" in platform.binary_formats and sys.platform == "darwin":
-            codesign = which("codesign")
-            if not codesign:
-                return
-            for binary in changed_files:
-                # preserve the original inode by running codesign on a copy
-                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                    codesign("-fs-", tmp_binary)
-
-    # If we are installing back to the same location
-    # relocate the sbang location if the spack directory changed
-    else:
-        if old_spack_prefix != new_spack_prefix:
-            relocate.relocate_text(text_names, prefix_to_prefix_text)
+    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
+    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
+    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
+
+    platform = spack.platforms.by_name(spec.platform)
+    if "macho" in platform.binary_formats:
+        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
+    elif "elf" in platform.binary_formats:
+        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
+
+    relocate.relocate_links(links, prefix_to_prefix)
+    relocate.relocate_text(textfiles, prefix_to_prefix)
+    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
+
+    # Add ad-hoc signatures to patched macho files when on macOS.
+    if "macho" in platform.binary_formats and sys.platform == "darwin":
+        codesign = which("codesign")
+        if not codesign:
+            return
+        for binary in changed_files:
+            # preserve the original inode by running codesign on a copy
+            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                codesign("-fs-", tmp_binary)
+
+    install_manifest = os.path.join(
+        spec.prefix,
+        spack.store.STORE.layout.metadata_dir,
+        spack.store.STORE.layout.manifest_file_name,
+    )
+    if not os.path.exists(install_manifest):
+        spec_id = spec.format("{name}/{hash:7}")
+        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+
+    # overwrite old metadata with new
+    if spec.spliced:
+        # rewrite spec on disk
+        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+
+        # de-cache the install manifest
+        with contextlib.suppress(FileNotFoundError):
+            os.unlink(install_manifest)


 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
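The rewrite collapses the former `prefix_to_prefix_text`/`prefix_to_prefix_bin` pair into a single insertion-ordered mapping, filters out identity entries, and bails out early when nothing is left. A toy sketch of that bookkeeping, with hypothetical paths:

```python
# Specific dependency prefixes are inserted first, the layout-root fallback
# last, so the most specific match wins during replacement (dicts preserve
# insertion order in Python 3.7+).
old_layout_root, new_layout_root = "/old/store", "/new/store"
prefix_to_prefix = {
    "/old/store/zlib-abc123": "/upstream/store/zlib-abc123",  # dep in an upstream
    old_layout_root: new_layout_root,
}
# Identity mappings would cause pointless rewrites, so they are dropped.
prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
if not prefix_to_prefix:
    print("nothing to relocate")
```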
@@ -2472,15 +2455,6 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
        except Exception as e:
            shutil.rmtree(spec.prefix, ignore_errors=True)
            raise e
-        else:
-            manifest_file = os.path.join(
-                spec.prefix,
-                spack.store.STORE.layout.metadata_dir,
-                spack.store.STORE.layout.manifest_file_name,
-            )
-            if not os.path.exists(manifest_file):
-                spec_id = spec.format("{name}/{hash:7}")
-                tty.warn("No manifest file in tarball for spec %s" % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2555,10 +2529,10 @@ def install_root_node(
        allow_missing: when true, allows installing a node with missing dependencies
    """
    # Early termination
-    if spec.external or spec.virtual:
-        warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
+    if spec.external or not spec.concrete:
+        warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
        return
-    elif spec.concrete and spec.installed and not force:
+    elif spec.installed and not force:
        warnings.warn("Package for spec {0} already installed.".format(spec.format()))
        return

@@ -2585,10 +2559,6 @@ def install_root_node(
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
        spec.package.windows_establish_runtime_linkage()
-        if spec.spliced:  # overwrite old metadata with new
-            spack.store.STORE.layout.write_spec(
-                spec, spack.store.STORE.layout.spec_file_path(spec)
-            )
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec, allow_missing=allow_missing)

@@ -2626,11 +2596,14 @@ def try_direct_fetch(spec, mirrors=None):
        )
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            specfile_contents = codecs.getreader("utf-8")(fs).read()
            specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (web_util.SpackWebError, OSError) as e1:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+                specfile_contents = codecs.getreader("utf-8")(fs).read()
+                specfile_is_signed = False
+            except (web_util.SpackWebError, OSError) as e2:
                tty.debug(
                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                    e1,
@@ -2640,7 +2613,6 @@ def try_direct_fetch(spec, mirrors=None):
                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                )
                continue
-        specfile_contents = codecs.getreader("utf-8")(fs).read()

        # read the spec from the build cache file. All specs in build caches
        # are concrete (as they are built) so we need to mark this spec
@@ -2734,8 +2706,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

        try:
            _, _, json_file = web_util.read_from_url(keys_index)
-            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except web_util.SpackWebError as url_err:
+            json_index = sjson.load(json_file)
+        except (web_util.SpackWebError, OSError, ValueError) as url_err:
+            # TODO: avoid repeated request
            if web_util.url_exists(keys_index):
                tty.error(
                    f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2982,14 +2955,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

    def get_remote_hash(self):
        # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
        try:
            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+            remote_hash = response.read(64)
+        except OSError:
            return None

        # Validate the hash
-        remote_hash = response.read(64)
        if not re.match(rb"[a-f\d]{64}$", remote_hash):
            return None
        return remote_hash.decode("utf-8")
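`get_remote_hash` now reads inside the `try` block, since `response.read()` can itself raise `OSError`; the 64-byte read plus anchored regex pins the payload to exactly one sha256 hex digest. A standalone illustration of the validation:

```python
import re

# A sha256 hex digest is exactly 64 lowercase hex characters; reading at
# most 64 bytes and anchoring the regex rejects anything else.
remote_hash = b"c0ffee" + b"0" * 58  # stand-in for response.read(64)
assert re.match(rb"[a-f\d]{64}$", remote_hash)
assert not re.match(rb"[a-f\d]{64}$", b"not-a-hash")
```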
@@ -3003,17 +2976,17 @@ def conditional_fetch(self) -> FetchIndexResult:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)

        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
+        except OSError as e:
+            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
-        except ValueError as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
+        except (ValueError, OSError) as e:
+            raise FetchIndexError(f"Remote index {url_index} is invalid") from e

        computed_hash = compute_hash(result)
@@ -3047,7 +3020,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

    def conditional_fetch(self) -> FetchIndexResult:
        # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

        try:
@@ -3057,12 +3030,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                # Not modified; that means fresh.
                return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
            raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except (TimeoutError, urllib.error.URLError) as e:
+        except OSError as e:  # URLError, socket.timeout, etc.
            raise FetchIndexError(f"Could not fetch index {url}", e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
-        except ValueError as e:
+        except (ValueError, OSError) as e:
            raise FetchIndexError(f"Remote index {url} is invalid", e) from e

        headers = response.headers
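For reference, the ETag handshake this method implements: the client echoes the cached ETag in `If-None-Match`, and a 304 reply (surfacing as `HTTPError`, itself an `OSError`) means the cached index is still fresh. A sketch of the request side, with a hypothetical mirror URL:

```python
import urllib.request

# Hypothetical mirror; urlopen(req) would raise HTTPError with code 304 when
# the index is unchanged, which conditional_fetch maps to fresh=True.
req = urllib.request.Request(
    "https://mirror.example.com/build_cache/index.json",
    headers={"User-Agent": "spack", "If-None-Match": '"deadbeef"'},
)
```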
@@ -3094,11 +3067,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
-        except (TimeoutError, urllib.error.URLError) as e:
+        except OSError as e:
            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

        try:
-            manifest = json.loads(response.read())
+            manifest = json.load(response)
        except Exception as e:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

@@ -3113,14 +3086,16 @@ def conditional_fetch(self) -> FetchIndexResult:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise fetch the blob / index.json
-        response = self.urlopen(
-            urllib.request.Request(
-                url=self.ref.blob_url(index_digest),
-                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+        try:
+            response = self.urlopen(
+                urllib.request.Request(
+                    url=self.ref.blob_url(index_digest),
+                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+                )
            )
-        )
-
-        result = codecs.getreader("utf-8")(response).read()
+            result = codecs.getreader("utf-8")(response).read()
+        except (OSError, ValueError) as e:
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

        # Make sure the blob we download has the advertised hash
        if compute_hash(result) != index_digest.digest:
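After downloading, the blob must hash to the digest advertised in the manifest; a minimal stand-in for that check (the real `compute_hash` lives in Spack, this sha256 sketch only assumes it hashes the UTF-8 encoding):

```python
import hashlib

def compute_hash(data: str) -> str:
    # Stand-in for Spack's compute_hash: sha256 over the UTF-8 encoding.
    return hashlib.sha256(data.encode("utf-8")).hexdigest()

result = '{"database": {"installs": {}}}'
advertised_digest = compute_hash(result)  # would come from the OCI manifest
assert compute_hash(result) == advertised_digest
```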
@@ -5,12 +5,14 @@
 import fnmatch
 import glob
 import importlib
-import os.path
+import os
 import re
 import sys
 import sysconfig
 import warnings
-from typing import Dict, Optional, Sequence, Union
+from typing import Optional, Sequence, Union
+
+from typing_extensions import TypedDict

 import archspec.cpu

@@ -18,13 +20,17 @@
 from llnl.util import tty

 import spack.platforms
+import spack.spec
 import spack.store
 import spack.util.environment
 import spack.util.executable

 from .config import spec_for_current_python

-QueryInfo = Dict[str, "spack.spec.Spec"]
+
+class QueryInfo(TypedDict, total=False):
+    spec: spack.spec.Spec
+    command: spack.util.executable.Executable


 def _python_import(module: str) -> bool:
@@ -211,7 +217,9 @@ def _executables_in_store(
    ):
        spack.util.environment.path_put_first("PATH", [bin_dir])
        if query_info is not None:
-            query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
+            query_info["command"] = spack.util.executable.which(
+                *executables, path=bin_dir, required=True
+            )
            query_info["spec"] = concrete_spec
        return True
    return False
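Replacing the `Dict[str, "spack.spec.Spec"]` alias with a `TypedDict` lets type checkers see that `spec` and `command` hold different types; `total=False` keeps an empty dict valid, which matches how callers fill the mapping incrementally. A self-contained sketch with stand-in value types:

```python
from typing_extensions import TypedDict

class QueryInfo(TypedDict, total=False):
    spec: str      # stand-in for spack.spec.Spec
    command: str   # stand-in for spack.util.executable.Executable

info: QueryInfo = {}          # valid: total=False makes every key optional
info["spec"] = "python@3.11"  # keys can be added one at a time
info["command"] = "/usr/bin/python3"
```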
@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
-        self.host_os = self.host_platform.operating_system("frontend")
+        self.host_os = self.host_platform.default_operating_system()
        self.host_target = archspec.cpu.host().family
-        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
+        self.host_architecture = spack.spec.ArchSpec.default_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
        self.host_python = self.python_external_spec()
@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""

 import contextlib
-import os.path
+import os
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence

@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:


 def _add_compilers_if_missing() -> None:
-    arch = spack.spec.ArchSpec.frontend_arch()
+    arch = spack.spec.ArchSpec.default_arch()
    if not spack.compilers.compilers_for_arch(arch):
        spack.compilers.find_compilers()

@@ -25,7 +25,6 @@
 import functools
 import json
 import os
-import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -34,8 +33,10 @@
 from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
+import spack.concretize
 import spack.config
 import spack.detection
+import spack.error
 import spack.mirrors.mirror
 import spack.platforms
 import spack.spec
@@ -44,10 +45,17 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller

-from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
+from ._common import (
+    QueryInfo,
+    _executables_in_store,
+    _python_import,
+    _root_spec,
+    _try_import_from_store,
+)
 from .clingo import ClingoBootstrapConcretizer
 from .config import spack_python_interpreter, spec_for_current_python

@@ -89,8 +97,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.name = conf["name"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

-        # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        # Check for relative paths, and turn them into absolute paths
+        # root is the metadata_dir
+        maybe_url = conf["info"]["url"]
+        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
+            maybe_url = os.path.join(self.metadata_dir, maybe_url)
+        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:
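A sketch of the new URL handling (hypothetical values, and a simplified stand-in for `is_path_instead_of_url`): a relative path in the bootstrap config is now rooted at `metadata_dir` rather than at whatever the current working directory happens to be.

```python
import os

metadata_dir = "/opt/spack/share/spack/bootstrap/github-actions"  # hypothetical
maybe_url = "../binary-mirror"                                    # hypothetical
looks_like_path = "://" not in maybe_url  # simplified stand-in for is_path_instead_of_url
if looks_like_path and not os.path.isabs(maybe_url):
    maybe_url = os.path.join(metadata_dir, maybe_url)
print(maybe_url)  # /opt/spack/share/spack/bootstrap/github-actions/../binary-mirror
```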
@@ -134,7 +146,7 @@ class BuildcacheBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
-        self.last_search: Optional[ConfigDictionary] = None
+        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
@@ -211,14 +223,14 @@ def _install_and_test(
            for _, pkg_hash, pkg_sha256 in item["binaries"]:
                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

-            info: ConfigDictionary = {}
+            info: QueryInfo = {}
            if test_fn(query_spec=abstract_spec, query_info=info):
                self.last_search = info
                return True
        return False

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: ConfigDictionary
+        info: QueryInfo
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True
@@ -231,7 +243,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: ConfigDictionary
+        info: QueryInfo
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
@@ -249,11 +261,11 @@ class SourceBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
-        self.last_search: Optional[ConfigDictionary] = None
+        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: ConfigDictionary = {}
+        info: QueryInfo = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -270,17 +282,22 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
            bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
            concrete_spec = bootstrapper.concretize()
        else:
-            concrete_spec = spack.spec.Spec(
+            abstract_spec = spack.spec.Spec(
                abstract_spec_str + " ^" + spec_for_current_python()
            )
-            concrete_spec.concretize()
+            concrete_spec = spack.concretize.concretize_one(abstract_spec)

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
-            PackageInstaller([concrete_spec.package], fail_fast=True).install()
+            PackageInstaller(
+                [concrete_spec.package],
+                fail_fast=True,
+                package_use_cache=False,
+                dependencies_use_cache=False,
+            ).install()

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -288,7 +305,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return False

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: ConfigDictionary = {}
+        info: QueryInfo = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -299,7 +316,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

-        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
+        concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
@@ -316,11 +333,9 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


-def source_is_enabled_or_raise(conf: ConfigDictionary):
-    """Raise ValueError if the source is not enabled for bootstrapping"""
-    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
-    if not trusted.get(name, False):
-        raise ValueError("source is not trusted")
+def source_is_enabled(conf: ConfigDictionary) -> bool:
+    """Returns true if the source is enabled for bootstrapping"""
+    return spack.config.get("bootstrap:trusted").get(conf["name"], False)


 def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
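With the predicate form, disabled sources are skipped before entering the grouped exception handler, so they no longer show up as "failures" in the error report. A self-contained sketch of the resulting loop shape, with hypothetical config data:

```python
# Hypothetical data mirroring the loop in the two ensure_* functions below.
sources = [{"name": "github-actions"}, {"name": "spack-install"}]
trusted = {"github-actions": True}  # stand-in for spack.config.get("bootstrap:trusted")

def source_is_enabled(conf) -> bool:
    return trusted.get(conf["name"], False)

for current_config in sources:
    if not source_is_enabled(current_config):
        continue  # skipped silently, not recorded as an exception
    print(f"trying {current_config['name']}")
```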
@@ -350,24 +365,24 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
+
        with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
-            current_bootstrapper = create_bootstrapper(current_config)
-            if current_bootstrapper.try_import(module, abstract_spec):
+            if create_bootstrapper(current_config).try_import(module, abstract_spec):
                return

-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {module}"
-    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
-        msg += "\nRun `spack --debug ...` for more detailed errors"
+        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise ImportError(msg)


@@ -405,8 +420,9 @@ def ensure_executables_in_path_or_raise(
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
        with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
@@ -414,6 +430,7 @@ def ensure_executables_in_path_or_raise(
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
+                assert cmd is not None, "expected an Executable"
                cmd.add_default_envmod(
                    spack.user_environment.environment_modifications_for_specs(
                        concrete_spec, set_package_py_globals=False
@@ -421,18 +438,17 @@ def ensure_executables_in_path_or_raise(
                )
                return cmd

-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {executables_str}"
-    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
-        msg += "\nRun `spack --debug ...` for more detailed errors"
+        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise RuntimeError(msg)


@@ -63,7 +63,6 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

 def _core_requirements() -> List[RequiredResponseType]:
    _core_system_exes = {
-        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
@@ -44,7 +44,19 @@
 from enum import Flag, auto
 from itertools import chain
 from multiprocessing.connection import Connection
-from typing import Callable, Dict, List, Optional, Set, Tuple
+from typing import (
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    TextIO,
+    Tuple,
+    Type,
+    Union,
+    overload,
+)

 import archspec.cpu

@@ -146,48 +158,128 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


 class MakeExecutable(Executable):
-    """Special callable executable object for make so the user can specify
-    parallelism options on a per-invocation basis. Specifying
-    'parallel' to the call will override whatever the package's
-    global setting is, so you can either default to true or false and
-    override particular calls. Specifying 'jobs_env' to a particular
-    call will name an environment variable which will be set to the
-    parallelism level (without affecting the normal invocation with
-    -j).
-    """
+    """Special callable executable object for make so the user can specify parallelism options
+    on a per-invocation basis.
+    """

-    def __init__(self, name, jobs, **kwargs):
-        supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super().__init__(name, **kwargs)
+    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
+        super().__init__(name)
        self.supports_jobserver = supports_jobserver
        self.jobs = jobs

-    def __call__(self, *args, **kwargs):
-        """parallel, and jobs_env from kwargs are swallowed and used here;
-        remaining arguments are passed through to the superclass.
-        """
-        parallel = kwargs.pop("parallel", True)
-        jobs_env = kwargs.pop("jobs_env", None)
-        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str] = ...,
+        error: Union[Optional[TextIO], str] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> None: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Type[str], Callable] = ...,
+        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        error: Union[Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = True,
+        jobs_env: Optional[str] = None,
+        jobs_env_supports_jobserver: bool = False,
+        **kwargs,
+    ) -> Optional[str]:
+        """Runs this "make" executable in a subprocess.
+
+        Args:
+            parallel: if False, parallelism is disabled
+            jobs_env: environment variable that will be set to the current level of parallelism
+            jobs_env_supports_jobserver: whether the jobs env supports a job server
+
+        For all the other **kwargs, refer to the base class.
+        """
        jobs = get_effective_jobs(
            self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
        )
        if jobs is not None:
-            args = ("-j{0}".format(jobs),) + args
+            args = (f"-j{jobs}",) + args

        if jobs_env:
-            # Caller wants us to set an environment variable to
-            # control the parallelism.
+            # Caller wants us to set an environment variable to control the parallelism
            jobs_env_jobs = get_effective_jobs(
                self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
            )
            if jobs_env_jobs is not None:
-                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
+                extra_env = kwargs.setdefault("extra_env", {})
+                extra_env.update({jobs_env: str(jobs_env_jobs)})

        return super().__call__(*args, **kwargs)
|
|
||||||
|
|
||||||
|
class UndeclaredDependencyError(spack.error.SpackError):
|
||||||
|
"""Raised if a dependency is invoking an executable through a module global, without
|
||||||
|
declaring a dependency on it.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class DeprecatedExecutable:
|
||||||
|
def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
|
||||||
|
self.pkg = pkg
|
||||||
|
self.exe = exe
|
||||||
|
self.exe_pkg = exe_pkg
|
||||||
|
|
||||||
|
def __call__(self, *args, **kwargs):
|
||||||
|
raise UndeclaredDependencyError(
|
||||||
|
f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def add_default_env(self, key: str, value: str):
|
||||||
|
self.__call__()
|
||||||
|
|
||||||
|
|
||||||
def clean_environment():
|
def clean_environment():
|
||||||
# Stuff in here sanitizes the build environment to eliminate
|
# Stuff in here sanitizes the build environment to eliminate
|
||||||
# anything the user has set that may interfere. We apply it immediately
|
# anything the user has set that may interfere. We apply it immediately
|
||||||
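
The rewritten `__call__` replaces `kwargs.pop()` plumbing with keyword-only parameters and typed `@overload` stubs, so the parallelism controls are visible to type checkers. A minimal sketch of a call site this enables (the instance and the environment-variable name below are illustrative, not taken from the diff):

    # Assumes the new constructor signature above: jobs is keyword-only.
    make = MakeExecutable("make", jobs=16)

    # Disable parallelism for a fragile target: no -j flag is prepended.
    make("install", parallel=False)

    # Forward the effective job count to a child tool through an environment
    # variable (the variable name here is hypothetical).
    make("check", jobs_env="CTEST_PARALLEL_LEVEL")
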
@@ -209,11 +301,13 @@ def clean_environment():
     env.unset("CPLUS_INCLUDE_PATH")
     env.unset("OBJC_INCLUDE_PATH")

+    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
+    env.set("CONFIG_SITE", os.devnull)
     env.unset("CMAKE_PREFIX_PATH")

     env.unset("PYTHONPATH")
     env.unset("R_HOME")
     env.unset("R_ENVIRON")

     env.unset("LUA_PATH")
     env.unset("LUA_CPATH")

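Pointing CONFIG_SITE at os.devnull keeps autoconf's AC_SITE_LOAD hook from sourcing a user's config.site into every configure run. A small sketch of the same pattern through Spack's environment-modification API (assuming spack.util.environment; apply_modifications mutates os.environ):

    import os

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set("CONFIG_SITE", os.devnull)  # configure sources /dev/null, i.e. nothing
    env.unset("CMAKE_PREFIX_PATH")
    env.apply_modifications()
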
@@ -621,10 +715,9 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
     module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

-    # TODO: make these build deps that can be installed if not found.
-    module.make = MakeExecutable("make", jobs)
-    module.gmake = MakeExecutable("gmake", jobs)
-    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
+    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
+    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
+    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")

     # TODO: johnwparent: add package or builder support to define these build tools
     # for now there is no entrypoint for builders to define these on their
     # own
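
After this hunk the `make`, `gmake`, and `ninja` module globals are DeprecatedExecutable stand-ins, so a package must declare the build tool it calls. A hedged sketch of the package-side fix (the package name is hypothetical):

    class Example(MakefilePackage):
        # Without this line, calling `make` in a phase now raises
        # UndeclaredDependencyError instead of silently using the host tool.
        depends_on("gmake", type="build")
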
@@ -788,21 +881,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


-def load_external_modules(pkg):
-    """Traverse a package's spec DAG and load any external modules.
-
-    Traverse a package's dependencies and load any external modules
-    associated with them.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package to load deps for
-    """
-    for dep in list(pkg.spec.traverse()):
-        external_modules = dep.external_modules or []
-        for external_module in external_modules:
-            load_module(external_module)
-
-
 def setup_package(pkg, dirty, context: Context = Context.BUILD):
     """Execute all environment setup routines."""
     if context not in (Context.BUILD, Context.TEST):

@@ -853,7 +931,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     for mod in pkg.compiler.modules:
         load_module(mod)

-    load_external_modules(pkg)
+    load_external_modules(setup_context)

     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)
@@ -1142,6 +1220,21 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
     env.prepend_path("PATH", bin_dir)


+def load_external_modules(context: SetupContext) -> None:
+    """Traverse a package's spec DAG and load any external modules.
+
+    Traverse a package's dependencies and load any external modules
+    associated with them.
+
+    Args:
+        context: A populated SetupContext object
+    """
+    for spec, _ in context.external:
+        external_modules = spec.external_modules or []
+        for external_module in external_modules:
+            load_module(external_module)
+
+
 def _setup_pkg_and_run(
     serialized_pkg: "spack.subprocess_context.PackageInstallContext",
     function: Callable,
@@ -6,7 +6,9 @@
 import llnl.util.filesystem as fs

 import spack.directives
+import spack.spec
 import spack.util.executable
+import spack.util.prefix

 from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -17,19 +19,18 @@ class AspellBuilder(AutotoolsBuilder):
     to the Aspell extensions.
     """

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self,
+        pkg: "AspellDictPackage",  # type: ignore[override]
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ):
         aspell = spec["aspell"].prefix.bin.aspell
         prezip = spec["aspell"].prefix.bin.prezip
         destdir = prefix

-        sh = spack.util.executable.which("sh")
-        sh(
-            "./configure",
-            "--vars",
-            "ASPELL={0}".format(aspell),
-            "PREZIP={0}".format(prezip),
-            "DESTDIR={0}".format(destdir),
-        )
+        sh = spack.util.executable.Executable("/bin/sh")
+        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")


 # Aspell dictionaries install their bits into their prefix.lib
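
`which("sh")` returns None when the binary is not on the (sanitized) build PATH, so the old call could fail with an opaque TypeError; `Executable("/bin/sh")` is always callable and assumes the path exists on the build host. Roughly:

    from spack.util.executable import Executable, which

    sh = which("sh")            # may be None under a stripped PATH
    sh = Executable("/bin/sh")  # fails with a clear error only when run
    out = sh("./configure", "--help", output=str)  # capture stdout as a str
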
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union

@@ -356,6 +355,13 @@ def _do_patch_libtool_configure(self) -> None:
         )
         # Support Libtool 2.4.2 and older:
         x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
+        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
+        # -single_module linker flag. A deprecation warning makes it think the default is
+        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
+        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
+        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
+        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
+        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

     @spack.phase_callbacks.run_after("configure")
     def _do_patch_libtool(self) -> None:
@@ -527,7 +533,7 @@ def build_directory(self) -> str:
         return build_dir

     @spack.phase_callbacks.run_before("autoreconf")
-    def delete_configure_to_force_update(self) -> None:
+    def _delete_configure_to_force_update(self) -> None:
         if self.force_autoreconf:
             fs.force_remove(self.configure_abs_path)

@@ -540,7 +546,7 @@ def autoreconf_search_path_args(self) -> List[str]:
         return _autoreconf_search_path_args(self.spec)

     @spack.phase_callbacks.run_after("autoreconf")
-    def set_configure_or_die(self) -> None:
+    def _set_configure_or_die(self) -> None:
         """Ensure the presence of a "configure" script, or raise. If the "configure"
         is found, a module level attribute is set.

@@ -564,10 +570,7 @@ def configure_args(self) -> List[str]:
         return []

     def autoreconf(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Not needed usually, configure should be already there"""

@@ -596,10 +599,7 @@ def autoreconf(
         self.pkg.module.autoreconf(*autoreconf_args)

     def configure(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "configure", with the arguments specified by the builder and an
         appropriately set prefix.

@@ -612,10 +612,7 @@ def configure(
         pkg.module.configure(*options)

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         # See https://autotools.io/automake/silent.html

@@ -625,10 +622,7 @@ def build(
         pkg.module.make(*params)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):

@@ -825,7 +819,7 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")

     @spack.phase_callbacks.run_after("install")
-    def remove_libtool_archives(self) -> None:
+    def _remove_libtool_archives(self) -> None:
         """Remove all .la files in prefix sub-folders if the package sets
         ``install_libtool_archives`` to be False.
         """
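
The renamed callbacks gain a leading underscore because they are registered through the decorator, not discovered by name, so the rename only signals that they are not part of the overridable builder API. The registration pattern, sketched for a hypothetical builder:

    import spack.phase_callbacks

    class MyBuilder(AutotoolsBuilder):
        @spack.phase_callbacks.run_after("install")
        def _cleanup(self) -> None:
            # Runs automatically after the install phase; packages are not
            # expected to call or override underscore-prefixed callbacks.
            ...
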
@@ -10,6 +10,9 @@
 import llnl.util.tty as tty

 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
+from spack.directives import depends_on

 from .cmake import CMakeBuilder, CMakePackage

@@ -293,6 +296,13 @@ def initconfig_hardware_entries(self):
             entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
             entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

+            if spec.satisfies("%gcc"):
+                entries.append(
+                    cmake_cache_string(
+                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
+                    )
+                )
+
         return entries

     def std_initconfig_entries(self):

@@ -323,7 +333,9 @@ def initconfig_package_entries(self):
         """This method is to be overwritten by the package"""
         return []

-    def initconfig(self, pkg, spec, prefix):
+    def initconfig(
+        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         cache_entries = (
             self.std_initconfig_entries()
             + self.initconfig_compiler_entries()

@@ -360,6 +372,10 @@ class CachedCMakePackage(CMakePackage):

     CMakeBuilder = CachedCMakeBuilder

+    # These dependencies are assumed in the builder
+    depends_on("c", type="build")
+    depends_on("cxx", type="build")
+
     def flag_handler(self, name, flags):
         if name in ("cflags", "cxxflags", "cppflags", "fflags"):
             return None, None, None  # handled in the cmake cache
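
For HIP builds with %gcc, the new branch pins clang's GCC installation via CMAKE_HIP_FLAGS. A local sketch of what the resulting cache entry looks like (a simplified stand-in for the real cmake_cache_string helper; the /opt/gcc-12 prefix is hypothetical):

    def cmake_cache_string(name, value, comment=""):
        # Simplified: the real helper lives in the CachedCMake machinery.
        return f'set({name} "{value}" CACHE STRING "{comment}")'

    print(cmake_cache_string("CMAKE_HIP_FLAGS", "--gcc-toolchain=/opt/gcc-12"))
    # set(CMAKE_HIP_FLAGS "--gcc-toolchain=/opt/gcc-12" CACHE STRING "")
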
@@ -7,6 +7,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

@@ -68,10 +70,16 @@ def build_directory(self):
         """Return the directory containing the main Cargo.toml."""
         return self.pkg.stage.source_path

+    @property
+    def std_build_args(self):
+        """Standard arguments for ``cargo build`` provided as a property for
+        convenience of package writers."""
+        return ["-j", str(self.pkg.module.make_jobs)]
+
     @property
     def build_args(self):
         """Arguments for ``cargo build``."""
-        return ["-j", str(self.pkg.module.make_jobs)]
+        return []

     @property
     def check_args(self):

@@ -81,12 +89,18 @@ def check_args(self):
     def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
+            pkg.module.cargo(
+                "install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
+            )

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy build files into package prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)
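
Splitting std_build_args out of build_args lets a package add its own flags without losing the default -j parallelism. A hedged package-level override (package and feature names hypothetical):

    class MyRustTool(CargoPackage):
        @property
        def build_args(self):
            # std_build_args still contributes "-j <make_jobs>"; this
            # property now only appends package-specific flags.
            return ["--features", "cli"]
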
@@ -11,6 +11,7 @@
 from typing import Any, List, Optional, Tuple

 import llnl.util.filesystem as fs
+from llnl.util import tty
 from llnl.util.lang import stable_partition

 import spack.builder

@@ -454,18 +455,27 @@ def cmake_args(self) -> List[str]:
         return []

     def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Runs ``cmake`` in the build directory"""

-        # skip cmake phase if it is an incremental develop build
-        if spec.is_develop and os.path.isfile(
-            os.path.join(self.build_directory, "CMakeCache.txt")
-        ):
-            return
+        if spec.is_develop:
+            # skip cmake phase if it is an incremental develop build
+
+            # Determine the files that will re-run CMake that are generated from a successful
+            # configure step based on state
+            primary_generator = _extract_primary_generator(self.generator)
+            configure_artifact = "Makefile"
+            if primary_generator == "Ninja":
+                configure_artifact = "ninja.build"
+
+            if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
+                tty.msg(
+                    "Incremental build criteria satisfied."
+                    "Skipping CMake configure step. To force configuration run"
+                    f" `spack clean {pkg.name}`"
+                )
+                return

         options = self.std_cmake_args
         options += self.cmake_args()

@@ -474,10 +484,7 @@ def cmake(
         pkg.module.cmake(*options)

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):

@@ -488,10 +495,7 @@ def build(
             pkg.module.ninja(*self.build_targets)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
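
The develop-build skip now keys on the generator's configure artifact rather than CMakeCache.txt, since a stale cache file can survive while the generator files are gone. The decision, condensed into a standalone sketch (file names as in the diff above):

    import os

    def should_skip_configure(build_directory: str, primary_generator: str) -> bool:
        # Makefile for make-based generators, ninja.build for Ninja.
        artifact = "ninja.build" if primary_generator == "Ninja" else "Makefile"
        return os.path.isfile(os.path.join(build_directory, artifact))
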
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
     """Auxiliary class which contains CUDA variant, dependencies and conflicts
     and is meant to unify and facilitate its usage.

-    Maintainers: ax3l, Rombur, davidbeckingsale
+    Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
     """

     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list

@@ -47,6 +47,12 @@ class CudaPackage(PackageBase):
         "89",
         "90",
         "90a",
+        "100",
+        "100a",
+        "101",
+        "101a",
+        "120",
+        "120a",
     )

     # FIXME: keep cuda and cuda_arch separate to make usage easier until

@@ -99,39 +105,56 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # CUDA version vs Architecture
     # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
+    # Tesla support:
     depends_on("cuda@:6.0", when="cuda_arch=10")
     depends_on("cuda@:6.5", when="cuda_arch=11")
     depends_on("cuda@2.1:6.5", when="cuda_arch=12")
     depends_on("cuda@2.1:6.5", when="cuda_arch=13")

+    # Fermi support:
     depends_on("cuda@3.0:8.0", when="cuda_arch=20")
     depends_on("cuda@3.2:8.0", when="cuda_arch=21")

+    # Kepler support:
     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
     depends_on("cuda@5.0:11.8", when="cuda_arch=35")
     depends_on("cuda@6.5:11.8", when="cuda_arch=37")

+    # Maxwell support:
     depends_on("cuda@6.0:", when="cuda_arch=50")
     depends_on("cuda@6.5:", when="cuda_arch=52")
     depends_on("cuda@6.5:", when="cuda_arch=53")

+    # Pascal support:
     depends_on("cuda@8.0:", when="cuda_arch=60")
     depends_on("cuda@8.0:", when="cuda_arch=61")
     depends_on("cuda@8.0:", when="cuda_arch=62")

+    # Volta support:
     depends_on("cuda@9.0:", when="cuda_arch=70")
+    # Turing support:
     depends_on("cuda@9.0:", when="cuda_arch=72")
     depends_on("cuda@10.0:", when="cuda_arch=75")

+    # Ampere support:
     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")
     depends_on("cuda@11.4:", when="cuda_arch=87")
+    # Ada support:
     depends_on("cuda@11.8:", when="cuda_arch=89")

+    # Hopper support:
     depends_on("cuda@12.0:", when="cuda_arch=90")
     depends_on("cuda@12.0:", when="cuda_arch=90a")

+    # Blackwell support:
+    depends_on("cuda@12.8:", when="cuda_arch=100")
+    depends_on("cuda@12.8:", when="cuda_arch=100a")
+    depends_on("cuda@12.8:", when="cuda_arch=101")
+    depends_on("cuda@12.8:", when="cuda_arch=101a")
+    depends_on("cuda@12.8:", when="cuda_arch=120")
+    depends_on("cuda@12.8:", when="cuda_arch=120a")
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to

@@ -163,6 +186,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
     conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
+    conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")

@@ -171,6 +195,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
     conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
+    conflicts("%clang@20:", when="+cuda ^cuda@:12.8")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
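
With the Blackwell entries in place, a package mixing in CudaPackage can target the new capabilities. An illustrative consumer (package name hypothetical):

    class MySolver(CMakePackage, CudaPackage):
        # CudaPackage contributes the cuda variant, the cuda_arch values,
        # and the depends_on/conflicts table shown above.
        pass

    # e.g. `spack install mysolver +cuda cuda_arch=120` now concretizes and
    # pulls in cuda@12.8: per the table; per the new conflicts() entries,
    # %gcc@15: and %clang@20: additionally need a CUDA newer than 12.8.
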
@@ -7,6 +7,8 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix

 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests

@@ -48,3 +50,8 @@ class GenericBuilder(BuilderWithDefaults):

     # unconditionally perform any post-install phase tests
     spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+
+    def install(
+        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        raise NotImplementedError
@@ -7,7 +7,9 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-from spack.directives import build_system, extends
+import spack.spec
+import spack.util.prefix
+from spack.directives import build_system, depends_on
 from spack.multimethod import when

 from ._checks import BuilderWithDefaults, execute_install_time_tests

@@ -26,9 +28,7 @@ class GoPackage(spack.package_base.PackageBase):
     build_system("go")

     with when("build_system=go"):
-        # TODO: this seems like it should be depends_on, see
-        # setup_dependent_build_environment in go for why I kept it like this
-        extends("go@1.14:", type="build")
+        depends_on("go", type="build")


 @spack.builder.builder("go")

@@ -71,6 +71,7 @@ class GoBuilder(BuilderWithDefaults):
     def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
+        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

     @property
     def build_directory(self):

@@ -81,19 +82,31 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+        return [
+            "-p",
+            str(self.pkg.module.make_jobs),
+            "-modcacherw",
+            "-ldflags",
+            "-s -w",
+            "-o",
+            f"{self.pkg.name}",
+        ]

     @property
     def check_args(self):
         """Argument for ``go test`` during check phase"""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.go("build", *self.build_args)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install built binaries into prefix bin."""
         with fs.working_dir(self.build_directory):
             fs.mkdirp(prefix.bin)
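
The new -p flag caps Go's internal build parallelism at Spack's job count instead of the machine's CPU count. The effective command line, reconstructed as a sketch (the package name and job count are illustrative):

    def go_build_command(pkg_name: str, make_jobs: int) -> list:
        # Mirrors GoBuilder.build_args above with concrete values filled in.
        return ["go", "build", "-p", str(make_jobs), "-modcacherw",
                "-ldflags", "-s -w", "-o", pkg_name]

    print(" ".join(go_build_command("mytool", 8)))
    # go build -p 8 -modcacherw -ldflags -s -w -o mytool
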
@@ -7,7 +7,9 @@

 import spack.builder
 import spack.package_base
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when

@@ -55,7 +57,9 @@ class LuaBuilder(spack.builder.Builder):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def unpack(self, pkg, spec, prefix):
+    def unpack(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
             directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")

@@ -66,15 +70,16 @@ def unpack(self, pkg, spec, prefix):
     def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

-    def generate_luarocks_config(self, pkg, spec, prefix):
+    def generate_luarocks_config(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         spec = self.pkg.spec
         table_entries = []
         for d in spec.traverse(deptype=("build", "run")):
             if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))

-        path = self._luarocks_config_path()
-        with open(path, "w", encoding="utf-8") as config:
+        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"

@@ -85,23 +90,26 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                 "\n".join(table_entries)
             )
         )
-        return path

-    def preprocess(self, pkg, spec, prefix):
+    def preprocess(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Override this to preprocess source before building with luarocks"""
         pass

     def luarocks_args(self):
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
@@ -98,29 +98,20 @@ def build_directory(self) -> str:
         return self.pkg.stage.source_path

     def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
@@ -5,6 +5,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which

@@ -58,16 +60,20 @@ def build_args(self):
         """List of args to pass to build phase."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Compile code and package into a JAR file."""
         with fs.working_dir(self.build_directory):
-            mvn = which("mvn")
+            mvn = which("mvn", required=True)
             if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy to installation prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree(".", prefix)
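
`which("mvn")` returns None when Maven is missing, so the old code only failed later with a TypeError at the call site; required=True fails fast with a descriptive error instead (the exact exception type is defined by spack.util.executable). Roughly:

    from spack.util.executable import which

    mvn = which("mvn")                 # None if mvn is not on PATH
    mvn = which("mvn", required=True)  # raises immediately instead of returning None
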
@@ -48,6 +48,9 @@ class MesonPackage(spack.package_base.PackageBase):
     variant("strip", default=False, description="Strip targets on install")
     depends_on("meson", type="build")
     depends_on("ninja", type="build")
+    # Meson uses pkg-config for dependency detection, and this dependency is
+    # often overlooked by packages that use meson as a build system.
+    depends_on("pkgconfig", type="build")
     # Python detection in meson requires distutils to be importable, but distutils no longer
     # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
     # because the distutils-precedence.pth startup file that setuptools ships with is not run

@@ -188,10 +191,7 @@ def meson_args(self) -> List[str]:
         return []

     def meson(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run ``meson`` in the build directory"""
         options = []

@@ -204,10 +204,7 @@ def meson(
         pkg.module.meson(*options)

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         options = ["-v"]

@@ -216,10 +213,7 @@ def build(
         pkg.module.ninja(*options)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts

 from ._checks import BuilderWithDefaults

@@ -99,7 +101,9 @@ def msbuild_install_args(self):
         as `msbuild_args` by default."""
         return self.msbuild_args()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.msbuild(

@@ -108,7 +112,9 @@ def build(self, pkg, spec, prefix):
                 self.define_targets(*self.build_targets),
             )

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the install targets specified by the builder.
         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts

 from ._checks import BuilderWithDefaults

@@ -123,7 +125,9 @@ def nmake_install_args(self):
         Individual packages should override to specify NMake args to command line"""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()

@@ -132,7 +136,9 @@ def build(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory):
             pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the install targets specified by the builder.
         This is INSTALL by default"""
         opts = self.std_nmake_args
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when

@@ -42,7 +44,9 @@ class OctaveBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package from the archive file"""
         pkg.module.octave(
             "--quiet",
@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "~envmods" not in self.spec:
+        if "+envmods" in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
                     self.component_prefix.env.join("vars.sh"), *self.env_script_args
@@ -10,6 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when

@@ -149,7 +151,9 @@ def configure_args(self):
         """
         return []

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.

@@ -173,7 +177,9 @@ def fix_shebang(self):
         repl = "#!/usr/bin/env perl"
         filter_file(pattern, repl, "Build", backup=False)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Builds a Perl package."""
         self.build_executable()

@@ -184,6 +190,8 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs a Perl package."""
         self.build_executable("install")
@@ -28,6 +28,7 @@
 import spack.repo
 import spack.spec
 import spack.store
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part

@@ -263,16 +264,17 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.operating_system("default_os")
-            host_target = host_platform.target("default_target")
+            host_os = host_platform.default_operating_system()
+            host_target = host_platform.default_target()
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
+            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = "default_os"
+                python.architecture.os = platform.default_operating_system()
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name

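
The renamed platform accessors drop the string-keyed lookup ("default_os"/"default_target"). Building a host ArchSpec with the new API, as a sketch grounded in the hunk above:

    import spack.platforms
    import spack.spec

    host_platform = spack.platforms.host()
    arch = spack.spec.ArchSpec(
        (
            str(host_platform),
            str(host_platform.default_operating_system()),
            str(host_platform.default_target()),
        )
    )
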
```diff
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -27,6 +29,7 @@ class QMakePackage(spack.package_base.PackageBase):
     build_system("qmake")
 
     depends_on("qmake", type="build", when="build_system=qmake")
+    depends_on("gmake", type="build")
 
 
 @spack.builder.builder("qmake")
@@ -61,17 +64,23 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []
 
-    def qmake(self, pkg, spec, prefix):
+    def qmake(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
```
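This hunk (and the matching ones for the Ruby, Racket, SCons, SIP and Waf builders below) is a mechanical typing change: each phase function gains `pkg`/`spec`/`prefix` annotations, which is why every one of these files also grows `import spack.spec` and `import spack.util.prefix`. A sketch of the annotated phase shape, with `MyPackage` as a hypothetical stand-in:

```python
import spack.spec
import spack.util.prefix


class MyBuilder:
    def build(
        self, pkg: "MyPackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        # The annotations are for static checkers (e.g. mypy); the runtime
        # behavior of the phase is unchanged.
        ...
```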
```diff
@@ -94,7 +94,7 @@ def list_url(cls):
         if cls.cran:
             return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
 
-    @property
-    def git(self):
-        if self.bioc:
-            return f"https://git.bioconductor.org/packages/{self.bioc}"
+    @lang.classproperty
+    def git(cls):
+        if cls.bioc:
+            return f"https://git.bioconductor.org/packages/{cls.bioc}"
```
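`git` becomes a class-level property so it can be computed without instantiating the package, matching `list_url` just above it. A minimal stdlib sketch of what a `classproperty` descriptor does (hypothetical implementation for illustration; the one used in the diff is presumably the `llnl.util.lang` helper imported as `lang`):

```python
class classproperty:
    """Descriptor that routes attribute access through the class, not an instance."""

    def __init__(self, fget):
        self.fget = fget

    def __get__(self, _instance, owner):
        return self.fget(owner)


class RPackageExample:
    bioc = "biocgenerics"  # hypothetical example value

    @classproperty
    def git(cls):
        return f"https://git.bioconductor.org/packages/{cls.bioc}"


# Works on the class itself -- a plain @property would fail here.
assert RPackageExample.git == "https://git.bioconductor.org/packages/biocgenerics"
```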
```diff
@@ -9,6 +9,8 @@
 import llnl.util.tty as tty
 
 import spack.builder
+import spack.spec
+import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
@@ -74,18 +76,22 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            name = pkg.racket_name
+            assert name is not None, "Racket package name is not set"
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                self.pkg.racket_name,
+                name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
@@ -101,8 +107,7 @@ def install(self, pkg, spec, prefix):
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                msg = (
-                    "Racket package {0} was already installed, uninstalling via "
+                tty.warn(
+                    f"Racket package {name} was already installed, uninstalling via "
                     "Spack may make someone unhappy!"
                 )
-                tty.warn(msg.format(self.pkg.racket_name))
```
```diff
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )
 
-    depends_on("llvm-amdgpu", when="+rocm")
+    depends_on("llvm-amdgpu", type="build", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
 
```
```diff
@@ -5,6 +5,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends, maintainers
 
 from ._checks import BuilderWithDefaults
@@ -42,7 +44,9 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build a Ruby gem."""
 
         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -58,7 +62,9 @@ def build(self, pkg, spec, prefix):
         # Some Ruby packages only ship `*.gem` files, so nothing to build
         pass
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install a Ruby gem.
 
         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
```
```diff
@@ -4,6 +4,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -59,7 +61,9 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))
 
@@ -67,7 +71,9 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
 
```
```diff
@@ -11,6 +11,8 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable
@@ -41,6 +43,7 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
+        depends_on("gmake", type="build")
 
     @property
     def import_modules(self):
@@ -130,7 +133,9 @@ class SIPBuilder(BuilderWithDefaults):
 
     build_directory = "build"
 
-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configure the package."""
 
         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
@@ -148,7 +153,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         args = self.build_args()
 
@@ -159,7 +166,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         args = self.install_args()
 
```
```diff
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 
 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
@@ -97,7 +99,9 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
 
-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()
@@ -108,7 +112,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []
 
-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Executes the build."""
         args = self.build_args()
 
@@ -118,7 +124,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []
 
-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs the targets on the system."""
         args = self.install_args()
 
```
```diff
@@ -6,6 +6,7 @@
 import codecs
 import json
 import os
+import pathlib
 import re
 import shutil
 import stat
@@ -14,16 +15,15 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.error import HTTPError, URLError
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.request import Request
 
+import llnl.path
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.color import cescape, colorize
 
 import spack
 import spack.binary_distribution as bindist
-import spack.builder
 import spack.concretize
 import spack.config as cfg
 import spack.environment as ev
@@ -33,6 +33,7 @@
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
```
@@ -63,6 +64,8 @@
|
|||||||
|
|
||||||
PushResult = namedtuple("PushResult", "success url")
|
PushResult = namedtuple("PushResult", "success url")
|
||||||
|
|
||||||
|
urlopen = web_util.urlopen # alias for mocking in tests
|
||||||
|
|
||||||
|
|
||||||
def get_change_revisions():
|
def get_change_revisions():
|
||||||
"""If this is a git repo get the revisions to use when checking
|
"""If this is a git repo get the revisions to use when checking
|
||||||
@@ -82,6 +85,9 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
|||||||
whether or not the stack was changed. Returns True if the environment
|
whether or not the stack was changed. Returns True if the environment
|
||||||
manifest changed between the provided revisions (or additionally if the
|
manifest changed between the provided revisions (or additionally if the
|
||||||
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
|
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
|
||||||
|
# git returns posix paths always, normalize input to be comptaible
|
||||||
|
# with that
|
||||||
|
env_path = llnl.path.convert_to_posix_path(env_path)
|
||||||
git = spack.util.git.git()
|
git = spack.util.git.git()
|
||||||
if git:
|
if git:
|
||||||
with fs.working_dir(spack.paths.prefix):
|
with fs.working_dir(spack.paths.prefix):
|
||||||
@@ -219,7 +225,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
|
|||||||
|
|
||||||
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
|
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
|
||||||
reason_msg = ", ".join(reasons)
|
reason_msg = ", ".join(reasons)
|
||||||
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
|
spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
|
||||||
|
|
||||||
if not prune:
|
if not prune:
|
||||||
status = colorize("@*g{[x]} ")
|
status = colorize("@*g{[x]} ")
|
||||||
@@ -472,12 +478,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
|
|||||||
# Use all unpruned specs to populate the build group for this set
|
# Use all unpruned specs to populate the build group for this set
|
||||||
cdash_config = cfg.get("cdash")
|
cdash_config = cfg.get("cdash")
|
||||||
if options.cdash_handler and options.cdash_handler.auth_token:
|
if options.cdash_handler and options.cdash_handler.auth_token:
|
||||||
try:
|
options.cdash_handler.populate_buildgroup(
|
||||||
options.cdash_handler.populate_buildgroup(
|
[options.cdash_handler.build_name(s) for s in pipeline_specs]
|
||||||
[options.cdash_handler.build_name(s) for s in pipeline_specs]
|
)
|
||||||
)
|
|
||||||
except (SpackError, HTTPError, URLError, TimeoutError) as err:
|
|
||||||
tty.warn(f"Problem populating buildgroup: {err}")
|
|
||||||
elif cdash_config:
|
elif cdash_config:
|
||||||
# warn only if there was actually a CDash configuration.
|
# warn only if there was actually a CDash configuration.
|
||||||
tty.warn("Unable to populate buildgroup without CDash credentials")
|
tty.warn("Unable to populate buildgroup without CDash credentials")
|
||||||
@@ -579,22 +582,25 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
|
|||||||
tty.debug(f"job spec: {job_spec}")
|
tty.debug(f"job spec: {job_spec}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
|
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
|
||||||
job_pkg = pkg_cls(job_spec)
|
except spack.error.SpackError as e:
|
||||||
tty.debug(f"job package: {job_pkg}")
|
tty.error(f"Cannot copy logs: {str(e)}")
|
||||||
except AssertionError:
|
|
||||||
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
|
|
||||||
tty.error(msg)
|
|
||||||
return
|
return
|
||||||
|
|
||||||
stage_dir = job_pkg.stage.path
|
# Get the package's archived files
|
||||||
tty.debug(f"stage dir: {stage_dir}")
|
archive_files = []
|
||||||
for file in [
|
archive_root = package_metadata_root / "archived-files"
|
||||||
job_pkg.log_path,
|
if archive_root.is_dir():
|
||||||
job_pkg.env_mods_path,
|
archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
|
||||||
*spack.builder.create(job_pkg).archive_files,
|
else:
|
||||||
]:
|
msg = "Cannot copy package archived files: archived-files must be a directory"
|
||||||
copy_files_to_artifacts(file, job_log_dir)
|
tty.warn(msg)
|
||||||
|
|
||||||
|
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
|
||||||
|
build_env_mods = package_metadata_root / "spack-build-env.txt"
|
||||||
|
|
||||||
|
for f in [build_log_zipped, build_env_mods, *archive_files]:
|
||||||
|
copy_files_to_artifacts(str(f), job_log_dir)
|
||||||
|
|
||||||
|
|
||||||
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
||||||
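The rewrite stops instantiating the package (which required a live stage directory and, as a later hunk shows, the `--keep-stage` install flag) and instead reads logs straight out of the installed package's metadata directory. A stdlib-only sketch of the new collection logic, with `metadata_root` standing in for `spack.store.STORE.layout.metadata_path(job_spec)`:

```python
import pathlib


def collect_log_files(metadata_root: pathlib.Path) -> list:
    """Gather the build log, env modifications and any archived files."""
    archive_root = metadata_root / "archived-files"
    archived = (
        [f for f in archive_root.rglob("*") if f.is_file()] if archive_root.is_dir() else []
    )
    return [
        metadata_root / "spack-build-out.txt.gz",  # zipped build output
        metadata_root / "spack-build-env.txt",  # environment modifications
        *archived,
    ]
```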
@@ -614,7 +620,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
|
|||||||
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
|
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
|
||||||
|
|
||||||
|
|
||||||
def download_and_extract_artifacts(url, work_dir):
|
def download_and_extract_artifacts(url, work_dir) -> str:
|
||||||
"""Look for gitlab artifacts.zip at the given url, and attempt to download
|
"""Look for gitlab artifacts.zip at the given url, and attempt to download
|
||||||
and extract the contents into the given work_dir
|
and extract the contents into the given work_dir
|
||||||
|
|
||||||
@@ -622,6 +628,10 @@ def download_and_extract_artifacts(url, work_dir):
|
|||||||
|
|
||||||
url (str): Complete url to artifacts.zip file
|
url (str): Complete url to artifacts.zip file
|
||||||
work_dir (str): Path to destination where artifacts should be extracted
|
work_dir (str): Path to destination where artifacts should be extracted
|
||||||
|
|
||||||
|
Output:
|
||||||
|
|
||||||
|
Artifacts root path relative to the archive root
|
||||||
"""
|
"""
|
||||||
tty.msg(f"Fetching artifacts from: {url}")
|
tty.msg(f"Fetching artifacts from: {url}")
|
||||||
|
|
||||||
@@ -631,31 +641,33 @@ def download_and_extract_artifacts(url, work_dir):
|
|||||||
if token:
|
if token:
|
||||||
headers["PRIVATE-TOKEN"] = token
|
headers["PRIVATE-TOKEN"] = token
|
||||||
|
|
||||||
opener = build_opener(HTTPHandler)
|
request = Request(url, headers=headers, method="GET")
|
||||||
|
|
||||||
request = Request(url, headers=headers)
|
|
||||||
request.get_method = lambda: "GET"
|
|
||||||
|
|
||||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
|
||||||
response_code = response.getcode()
|
|
||||||
|
|
||||||
if response_code != 200:
|
|
||||||
msg = f"Error response code ({response_code}) in reproduce_ci_job"
|
|
||||||
raise SpackError(msg)
|
|
||||||
|
|
||||||
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
|
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
|
||||||
|
os.makedirs(work_dir, exist_ok=True)
|
||||||
|
|
||||||
if not os.path.exists(work_dir):
|
try:
|
||||||
os.makedirs(work_dir)
|
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
|
with open(artifacts_zip_path, "wb") as out_file:
|
||||||
|
shutil.copyfileobj(response, out_file)
|
||||||
|
|
||||||
with open(artifacts_zip_path, "wb") as out_file:
|
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
|
||||||
shutil.copyfileobj(response, out_file)
|
zip_file.extractall(work_dir)
|
||||||
|
# Get the artifact root
|
||||||
|
artifact_root = ""
|
||||||
|
for f in zip_file.filelist:
|
||||||
|
if "spack.lock" in f.filename:
|
||||||
|
artifact_root = os.path.dirname(os.path.dirname(f.filename))
|
||||||
|
break
|
||||||
|
except OSError as e:
|
||||||
|
raise SpackError(f"Error fetching artifacts: {e}")
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
os.remove(artifacts_zip_path)
|
||||||
|
except FileNotFoundError:
|
||||||
|
# If the file doesn't exist we are already raising
|
||||||
|
pass
|
||||||
|
|
||||||
zip_file = zipfile.ZipFile(artifacts_zip_path)
|
return artifact_root
|
||||||
zip_file.extractall(work_dir)
|
|
||||||
zip_file.close()
|
|
||||||
|
|
||||||
os.remove(artifacts_zip_path)
|
|
||||||
|
|
||||||
|
|
||||||
def get_spack_info():
|
def get_spack_info():
|
||||||
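Besides moving to the mockable module-level `urlopen` alias, the function now reports where the artifacts actually live by scanning the zip for a `spack.lock` entry and taking its grandparent directory. The discovery step in isolation (stdlib only; the example path is illustrative):

```python
import os
import zipfile


def find_artifact_root(zip_path: str) -> str:
    """Return the archive-relative artifacts root, inferred from spack.lock."""
    with zipfile.ZipFile(zip_path) as zf:
        for info in zf.filelist:
            if "spack.lock" in info.filename:
                # e.g. "jobs_scratch_dir/concrete_environment/spack.lock":
                # two dirname calls strip the file name and the environment
                # directory, leaving "jobs_scratch_dir" as the root.
                return os.path.dirname(os.path.dirname(info.filename))
    return ""
```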
```diff
@@ -769,7 +781,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
     return True
 
 
-def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
+def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
     """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
     attempt to setup an environment in which the failure can be reproduced
     locally. This entails the following:
@@ -783,8 +795,11 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     commands to run to reproduce the build once inside the container.
     """
     work_dir = os.path.realpath(work_dir)
+    if os.path.exists(work_dir) and os.listdir(work_dir):
+        raise SpackError(f"Cannot run reproducer in non-empty working dir:\n {work_dir}")
+
     platform_script_ext = "ps1" if IS_WINDOWS else "sh"
-    download_and_extract_artifacts(url, work_dir)
+    artifact_root = download_and_extract_artifacts(url, work_dir)
 
     gpg_path = None
     if gpg_url:
@@ -846,6 +861,9 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     with open(repro_file, encoding="utf-8") as fd:
         repro_details = json.load(fd)
 
+    spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
+    reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
+
     repro_dir = os.path.dirname(repro_file)
     rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
 
```
@@ -906,17 +924,20 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
|
|||||||
commit_regex = re.compile(r"commit\s+([^\s]+)")
|
commit_regex = re.compile(r"commit\s+([^\s]+)")
|
||||||
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
|
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
|
||||||
|
|
||||||
# Try the more specific merge commit regex first
|
if use_local_head:
|
||||||
m = merge_commit_regex.search(spack_info)
|
commit_1 = "HEAD"
|
||||||
if m:
|
|
||||||
# This was a merge commit and we captured the parents
|
|
||||||
commit_1 = m.group(1)
|
|
||||||
commit_2 = m.group(2)
|
|
||||||
else:
|
else:
|
||||||
# Not a merge commit, just get the commit sha
|
# Try the more specific merge commit regex first
|
||||||
m = commit_regex.search(spack_info)
|
m = merge_commit_regex.search(spack_info)
|
||||||
if m:
|
if m:
|
||||||
|
# This was a merge commit and we captured the parents
|
||||||
commit_1 = m.group(1)
|
commit_1 = m.group(1)
|
||||||
|
commit_2 = m.group(2)
|
||||||
|
else:
|
||||||
|
# Not a merge commit, just get the commit sha
|
||||||
|
m = commit_regex.search(spack_info)
|
||||||
|
if m:
|
||||||
|
commit_1 = m.group(1)
|
||||||
|
|
||||||
setup_result = False
|
setup_result = False
|
||||||
if commit_1:
|
if commit_1:
|
||||||
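The new `--use-local-head` path short-circuits commit discovery entirely. The selection logic, reduced to a self-contained function (a sketch; the real code assigns into pre-declared `commit_1`/`commit_2` variables rather than returning them):

```python
import re

MERGE_RE = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
COMMIT_RE = re.compile(r"commit\s+([^\s]+)")


def select_commits(spack_info: str, use_local_head: bool):
    """Pick the commit(s) to check out when reproducing a CI job."""
    if use_local_head:
        return "HEAD", None
    m = MERGE_RE.search(spack_info)
    if m:  # merge commit: both parents were captured
        return m.group(1), m.group(2)
    m = COMMIT_RE.search(spack_info)
    return (m.group(1), None) if m else (None, None)
```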
```diff
@@ -991,6 +1012,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
         "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
     )
 
+    # Attempt to create a unique name for the reproducer container
+    container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
     docker_command = [
         runtime,
         "run",
@@ -998,14 +1021,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
         "-t",
         "--rm",
         "--name",
-        "spack_reproducer",
+        f"spack_reproducer{container_suffix}",
         "-v",
         ":".join([work_dir, mounted_workdir, "Z"]),
         "-v",
         ":".join(
             [
-                os.path.join(work_dir, "jobs_scratch_dir"),
-                os.path.join(mount_as_dir, "jobs_scratch_dir"),
+                os.path.join(work_dir, artifact_root),
+                os.path.join(mount_as_dir, artifact_root),
                 "Z",
             ]
         ),
```
```diff
@@ -1,23 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import copy
 import json
 import os
 import re
-import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.request import Request
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized
 
 import spack.binary_distribution as bindist
 import spack.config as cfg
@@ -35,32 +33,11 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
 
 
-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or cfg.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-_dyn_mapping_urlopener = Singleton(_urlopen)
+# this exists purely for testing purposes
+_urlopen = web_util.urlopen
 
 
 def copy_files_to_artifacts(src, artifacts_dir):
```
```diff
@@ -279,26 +256,25 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)
 
-    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+    def create_buildgroup(self, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
 
         enc_data = json.dumps(data).encode("utf-8")
 
         request = Request(url, data=enc_data, headers=headers)
 
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code not in [200, 201]:
-            msg = f"Creating buildgroup failed (response code = {response_code})"
-            tty.warn(msg)
+        try:
+            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
+        except OSError as e:
+            tty.warn(f"Failed to create CDash buildgroup: {e}")
             return None
 
-        response_text = response.read()
-        response_json = json.loads(response_text)
-        build_group_id = response_json["id"]
-
-        return build_group_id
+        try:
+            response_json = json.loads(response_text)
+            return response_json["id"]
+        except (json.JSONDecodeError, KeyError) as e:
+            tty.warn(f"Failed to parse CDash response: {e}")
+            return None
 
     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"
```
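The new shape downgrades both transport failures and malformed CDash responses to warnings, returning `None` instead of raising mid-pipeline. The same pattern, self-contained (the stdlib `urlopen` stands in for Spack's `web_util.urlopen` wrapper; the URL and payload are illustrative):

```python
import json
from urllib.request import Request, urlopen


def create_group_id(url: str, payload: dict, headers: dict, timeout: float = 10.0):
    request = Request(url, data=json.dumps(payload).encode("utf-8"), headers=headers)
    try:
        response_text = urlopen(request, timeout=timeout).read()
    except OSError as e:  # URLError and HTTPError are both OSError subclasses
        print(f"warning: failed to create buildgroup: {e}")
        return None
    try:
        return json.loads(response_text)["id"]
    except (json.JSONDecodeError, KeyError) as e:
        print(f"warning: failed to parse response: {e}")
        return None
```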
```diff
@@ -308,16 +284,11 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }
 
-        opener = build_opener(HTTPHandler)
-
-        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(
-            opener, headers, url, f"Latest {self.build_group}", "Latest"
-        )
+        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")
 
         if not parent_group_id or not group_id:
-            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
-            tty.warn(msg)
+            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
             return
 
         data = {
@@ -329,15 +300,12 @@ def populate_buildgroup(self, job_names):
 
         enc_data = json.dumps(data).encode("utf-8")
 
-        request = Request(url, data=enc_data, headers=headers)
-        request.get_method = lambda: "PUT"
+        request = Request(url, data=enc_data, headers=headers, method="PUT")
 
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code != 200:
-            msg = f"Error response code ({response_code}) in populate_buildgroup"
-            tty.warn(msg)
+        try:
+            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        except OSError as e:
+            tty.warn(f"Failed to populate CDash buildgroup: {e}")
 
     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI
```
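One small modernization worth calling out: `urllib.request.Request` has accepted a `method=` keyword since Python 3.3, so the old `request.get_method = lambda: "PUT"` monkey-patch can simply be dropped. For example (values illustrative):

```python
from urllib.request import Request

# The method keyword replaces the get_method monkey-patch.
request = Request(
    "https://cdash.example.org/api/v1/buildgroup.php",
    data=b"{}",
    headers={"Content-Type": "application/json"},
    method="PUT",
)
assert request.get_method() == "PUT"
```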
```diff
@@ -735,9 +703,6 @@ def _apply_section(dest, src):
         for value in header.values():
             value = os.path.expandvars(value)
 
-        verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
-        timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
-
         required = mapping.get("require", [])
         allowed = mapping.get("allow", [])
         ignored = mapping.get("ignore", [])
@@ -771,19 +736,15 @@ def job_query(job):
             endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
         )
         try:
-            response = _dyn_mapping_urlopener(
-                request, verify_ssl=verify_ssl, timeout=timeout
-            )
+            response = _urlopen(request)
+            config = json.load(response)
         except Exception as e:
             # For now just ignore any errors from dynamic mapping and continue
             # This is still experimental, and failures should not stop CI
             # from running normally
-            tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
-            tty.warn(f"{e}")
+            tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
             continue
 
-        config = json.load(codecs.getreader("utf-8")(response))
-
         # Strip ignore keys
         if ignored:
             for key in ignored:
```
```diff
@@ -202,7 +202,7 @@ def _concretize_spec_pairs(
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized(tests=tests)]
+        return [concrete or spack.concretize.concretize_one(abstract, tests=tests)]
 
     # Special case if every spec is either concrete or has an abstract hash
     if all(
@@ -254,9 +254,9 @@ def matching_spec_from_env(spec):
     """
    env = ev.active_environment()
     if env:
-        return env.matching_spec(spec) or spec.concretized()
+        return env.matching_spec(spec) or spack.concretize.concretize_one(spec)
     else:
-        return spec.concretized()
+        return spack.concretize.concretize_one(spec)
 
 
 def matching_specs_from_env(specs):
```
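These hunks (and the `bootstrap`/`buildcache` ones below) are all the same migration: the `Spec.concretized()` / `Spec.concretize()` methods give way to the module-level `spack.concretize.concretize_one()`, which, as the bootstrap hunk shows, also accepts a plain spec string. A sketch assuming a Spack checkout on `PYTHONPATH`:

```python
import spack.concretize
import spack.spec

# Old style: method on the Spec object
#   concrete = spack.spec.Spec("zlib").concretized()

# New style: free function in spack.concretize
concrete = spack.concretize.concretize_one(spack.spec.Spec("zlib"))
concrete_from_str = spack.concretize.concretize_one("zlib")  # strings work too
```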
@@ -297,7 +297,7 @@ def disambiguate_spec(
|
|||||||
|
|
||||||
def disambiguate_spec_from_hashes(
|
def disambiguate_spec_from_hashes(
|
||||||
spec: spack.spec.Spec,
|
spec: spack.spec.Spec,
|
||||||
hashes: List[str],
|
hashes: Optional[List[str]],
|
||||||
local: bool = False,
|
local: bool = False,
|
||||||
installed: Union[bool, InstallRecordStatus] = True,
|
installed: Union[bool, InstallRecordStatus] = True,
|
||||||
first: bool = False,
|
first: bool = False,
|
||||||
@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
|
|||||||
if len(matching_specs) <= 1:
|
if len(matching_specs) <= 1:
|
||||||
return
|
return
|
||||||
|
|
||||||
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
|
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
|
||||||
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
args = ["%s matches multiple packages." % spec, "Matching packages:"]
|
||||||
args += [
|
args += [
|
||||||
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
|
||||||
@@ -471,12 +471,11 @@ def get_arg(name, default=None):
|
|||||||
nfmt = "{fullname}" if namespaces else "{name}"
|
nfmt = "{fullname}" if namespaces else "{name}"
|
||||||
ffmt = ""
|
ffmt = ""
|
||||||
if full_compiler or flags:
|
if full_compiler or flags:
|
||||||
ffmt += "{%compiler.name}"
|
ffmt += "{compiler_flags} {%compiler.name}"
|
||||||
if full_compiler:
|
if full_compiler:
|
||||||
ffmt += "{@compiler.version}"
|
ffmt += "{@compiler.version}"
|
||||||
ffmt += " {compiler_flags}"
|
|
||||||
vfmt = "{variants}" if variants else ""
|
vfmt = "{variants}" if variants else ""
|
||||||
format_string = nfmt + "{@version}" + ffmt + vfmt
|
format_string = nfmt + "{@version}" + vfmt + ffmt
|
||||||
|
|
||||||
def fmt(s, depth=0):
|
def fmt(s, depth=0):
|
||||||
"""Formatter function for all output specs"""
|
"""Formatter function for all output specs"""
|
||||||
```diff
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import collections
+import warnings
 
 import archspec.cpu
 
@@ -51,10 +52,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend"
+        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
     )
 
 
@@ -98,15 +99,14 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return
 
-    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        os_args, target_args = "frontend", "frontend"
+        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
     elif args.backend:
-        os_args, target_args = "backend", "backend"
+        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
 
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system(os_args)
-    host_target = host_platform.target(target_args)
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     if args.family:
         host_target = host_target.family
     elif args.generic:
```
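The `--frontend`/`--backend` flags keep parsing so existing scripts don't break, but they no longer change which OS/target is printed; both paths now fall through to the same default lookup and only warn. The deprecation pattern in isolation (sketch):

```python
import warnings


def arch(frontend: bool = False, backend: bool = False) -> None:
    if frontend:
        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
    elif backend:
        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
    # Both branches continue to the same default os/target resolution.
```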
```diff
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import shutil
 import sys
 import tempfile
@@ -14,9 +14,9 @@
 import spack.bootstrap
 import spack.bootstrap.config
 import spack.bootstrap.core
+import spack.concretize
 import spack.config
 import spack.mirrors.utils
-import spack.spec
 import spack.stage
 import spack.util.path
 import spack.util.spack_yaml
@@ -397,7 +397,7 @@ def _mirror(args):
         llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
         # Suppress tty from the call below for terser messages
         llnl.util.tty.set_msg_enabled(False)
-        spec = spack.spec.Spec(spec_str).concretized()
+        spec = spack.concretize.concretize_one(spec_str)
         for node in spec.traverse():
             spack.mirrors.utils.create(mirror_dir, [node])
         llnl.util.tty.set_msg_enabled(True)
@@ -436,6 +436,7 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
+    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
 
 
```
```diff
@@ -16,6 +16,7 @@
 
 import spack.binary_distribution as bindist
 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev
@@ -554,8 +555,7 @@ def check_fn(args: argparse.Namespace):
         tty.msg("No specs provided, exiting.")
         return
 
-    for spec in specs:
-        spec.concretize()
+    specs = [spack.concretize.concretize_one(s) for s in specs]
 
     # Next see if there are any configured binary mirrors
     configured_mirrors = spack.config.get("mirrors", scope=args.scope)
@@ -623,7 +623,7 @@ def save_specfile_fn(args):
     root = specs[0]
 
     if not root.concrete:
-        root.concretize()
+        root = spack.concretize.concretize_one(root)
 
     save_dependency_specfiles(
         root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
```
```diff
@@ -4,7 +4,7 @@
 
 import re
 import sys
-from typing import Dict, Optional
+from typing import Dict, Optional, Tuple
 
 import llnl.string
 import llnl.util.lang
@@ -181,7 +181,11 @@ def checksum(parser, args):
         print()
 
     if args.add_to_package:
-        add_versions_to_package(pkg, version_lines, args.batch)
+        path = spack.repo.PATH.filename_for_package_name(pkg.name)
+        num_versions_added = add_versions_to_pkg(path, version_lines)
+        tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
+        if not args.batch and sys.stdin.isatty():
+            editor(path)
 
 
 def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -227,20 +231,9 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
         tty.die("Invalid checksums found.")
 
 
-def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
-    """
-    Add checksumed versions to a package's instructions and open a user's
-    editor so they may double check the work of the function.
-
-    Args:
-        pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
-        version_lines (str): A string of rendered version lines.
-
-    """
-    # Get filename and path for package
-    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
+def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
+    """Returns a tuple of number of versions added and the package's modified contents."""
     num_versions_added = 0
 
     version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
     version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
 
@@ -252,33 +245,34 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
         if match:
             new_versions.append((Version(match.group(1)), ver_line))
 
-    with open(filename, "r+", encoding="utf-8") as f:
-        contents = f.read()
-        split_contents = version_statement_re.split(contents)
+    split_contents = version_statement_re.split(package_src)
 
-        for i, subsection in enumerate(split_contents):
-            # If there are no more versions to add we should exit
-            if len(new_versions) <= 0:
-                break
-
-            # Check if the section contains a version
-            contents_version = version_re.match(subsection)
-            if contents_version is not None:
-                parsed_version = Version(contents_version.group(1))
-
-                if parsed_version < new_versions[0][0]:
-                    split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]
-                    num_versions_added += 1
-
-                elif parsed_version == new_versions[0][0]:
-                    new_versions.pop(0)
-
-        # Seek back to the start of the file so we can rewrite the file contents.
-        f.seek(0)
-        f.writelines("".join(split_contents))
-
-        tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
-        tty.msg(f"Open {filename} to review the additions.")
-
-        if sys.stdout.isatty() and not is_batch:
-            editor(filename)
+    for i, subsection in enumerate(split_contents):
+        # If there are no more versions to add we should exit
+        if len(new_versions) <= 0:
+            break
+
+        # Check if the section contains a version
+        contents_version = version_re.match(subsection)
+        if contents_version is not None:
+            parsed_version = Version(contents_version.group(1))
+
+            if parsed_version < new_versions[0][0]:
+                split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]
+                num_versions_added += 1
+
+            elif parsed_version == new_versions[0][0]:
+                new_versions.pop(0)
+
+    return num_versions_added, "".join(split_contents)
+
+
+def add_versions_to_pkg(path: str, version_lines: str) -> int:
+    """Add new versions to a package.py file. Returns the number of versions added."""
+    with open(path, "r", encoding="utf-8") as f:
+        package_src = f.read()
+    num_versions_added, package_src = _update_version_statements(package_src, version_lines)
+    if num_versions_added > 0:
+        with open(path, "w", encoding="utf-8") as f:
+            f.write(package_src)
+    return num_versions_added
```
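Splitting the pure text transformation (`_update_version_statements`) from file I/O (`add_versions_to_pkg`) makes the insertion logic testable without a package repo on disk. The core splice technique, reduced to a runnable sketch (source text and version line are illustrative):

```python
import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")

package_src = '    version("1.3.1", sha256="aaaa")\n'
new_version_line = '    version("1.3.2", sha256="bbbb")'

# re.split with a capturing group keeps each version() statement as a list
# item, so newer entries can be spliced in front of the first older version.
parts = version_statement_re.split(package_src)
parts[1:1] = [new_version_line, "  # FIXME", "\n"]
print("".join(parts))
```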
```diff
@@ -176,6 +176,11 @@ def setup_parser(subparser):
     reproduce.add_argument(
         "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
     )
+    reproduce.add_argument(
+        "--use-local-head",
+        help="Use the HEAD of the local Spack instead of reproducing a commit",
+        action="store_true",
+    )
     gpg_group = reproduce.add_mutually_exclusive_group(required=False)
     gpg_group.add_argument(
         "--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -422,7 +427,7 @@ def ci_rebuild(args):
 
     # Arguments when installing the root from sources
     deps_install_args = install_args + ["--only=dependencies"]
-    root_install_args = install_args + ["--keep-stage", "--only=package"]
+    root_install_args = install_args + ["--only=package"]
 
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
@@ -459,8 +464,7 @@ def ci_rebuild(args):
         job_spec.to_dict(hash=ht.dag_hash),
     )
 
-    # We generated the "spack install ..." command to "--keep-stage", copy
-    # any logs from the staging directory to artifacts now
+    # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
     spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
 
     # If the installation succeeded and we're running stand-alone tests for
@@ -608,7 +612,12 @@ def ci_reproduce(args):
        gpg_key_url = None
 
     return spack_ci.reproduce_ci_job(
-        args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
+        args.job_url,
+        args.working_dir,
+        args.autostart,
+        gpg_key_url,
+        args.runtime,
+        args.use_local_head,
     )
 
 
```
```diff
@@ -4,7 +4,7 @@
 
 
 import argparse
-import os.path
+import os
 import textwrap
 
 from llnl.util.lang import stable_partition
@@ -528,7 +528,6 @@ def __call__(self, parser, namespace, values, option_string):
         # the const from the constructor or a value from the CLI.
         # Note that this is only called if the argument is actually
         # specified on the command line.
-        spack.config.CONFIG.ensure_scope_ordering()
         spack.config.set(self.config_path, self.const, scope="command_line")
 
 
```
```diff
@@ -350,9 +350,12 @@ def _config_change(config_path, match_spec_str=None):
                 if spack.config.get(key_path, scope=scope):
                     ideal_scope_to_modify = scope
                     break
+        # If we find our key in a specific scope, that's the one we want
+        # to modify. Otherwise we use the default write scope.
+        write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
 
         update_path = f"{key_path}:[{str(spec)}]"
-        spack.config.add(update_path, scope=ideal_scope_to_modify)
+        spack.config.add(update_path, scope=write_scope)
     else:
         raise ValueError("'config change' can currently only change 'require' sections")
 
```
```diff
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 
 import llnl.util.tty
 
```
```diff
@@ -2,23 +2,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import os
 import platform
-import re
-import sys
-from datetime import datetime
-from glob import glob
-
-import llnl.util.tty as tty
-from llnl.util.filesystem import working_dir
 
 import spack
-import spack.paths
 import spack.platforms
 import spack.spec
-import spack.store
-import spack.util.git
-from spack.util.executable import which
 
 description = "debugging commands for troubleshooting Spack"
 section = "developer"
@@ -27,67 +15,13 @@
 
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
-    sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
     sp.add_parser("report", help="print information useful for bug reports")
 
 
-def _debug_tarball_suffix():
-    now = datetime.now()
-    suffix = now.strftime("%Y-%m-%d-%H%M%S")
-
-    git = spack.util.git.git()
-    if not git:
-        return "nobranch-nogit-%s" % suffix
-
-    with working_dir(spack.paths.prefix):
-        if not os.path.isdir(".git"):
-            return "nobranch.nogit.%s" % suffix
-
-        # Get symbolic branch name and strip any special chars (mainly '/')
-        symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
-        symbolic = re.sub(r"[^\w.-]", "-", symbolic)
-
-        # Get the commit hash too.
-        commit = git("rev-parse", "--short", "HEAD", output=str).strip()
-
-        if symbolic == commit:
-            return "nobranch.%s.%s" % (commit, suffix)
-        else:
-            return "%s.%s.%s" % (symbolic, commit, suffix)
-
-
-def create_db_tarball(args):
-    tar = which("tar")
-    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
-    tarball_path = os.path.abspath(tarball_name)
-
-    base = os.path.basename(str(spack.store.STORE.root))
-    transform_args = []
-    # Currently --transform and -s are not supported by Windows native tar
-    if "GNU" in tar("--version", output=str):
-        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
-    elif sys.platform != "win32":
-        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
-
-    wd = os.path.dirname(str(spack.store.STORE.root))
-    with working_dir(wd):
-        files = [spack.store.STORE.db._index_path]
-        files += glob("%s/*/*/*/.spack/spec.json" % base)
-        files += glob("%s/*/*/*/.spack/spec.yaml" % base)
-        files = [os.path.relpath(f) for f in files]
-
-        args = ["-czf", tarball_path]
-        args += transform_args
-        args += files
-        tar(*args)
-
-    tty.msg("Created %s" % tarball_name)
-
-
 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("frontend")
-    host_target = host_platform.target("frontend")
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
@@ -95,5 +29,5 @@ def report(args):
 
 
 def debug(parser, args):
```
|
||||||
action = {"create-db-tarball": create_db_tarball, "report": report}
|
if args.debug_command == "report":
|
||||||
action[args.debug_command](args)
|
report(args)
|
||||||
|
@@ -9,9 +9,9 @@
|
|||||||
|
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.package_base
|
|
||||||
import spack.store
|
import spack.store
|
||||||
from spack.cmd.common import arguments
|
from spack.cmd.common import arguments
|
||||||
|
from spack.solver.input_analysis import create_graph_analyzer
|
||||||
|
|
||||||
description = "show dependencies of a package"
|
description = "show dependencies of a package"
|
||||||
section = "basic"
|
section = "basic"
|
||||||
@@ -55,7 +55,7 @@ def dependencies(parser, args):
|
|||||||
env = ev.active_environment()
|
env = ev.active_environment()
|
||||||
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
spec = spack.cmd.disambiguate_spec(specs[0], env)
|
||||||
|
|
||||||
format_string = "{name}{@version}{%compiler}{/hash:7}"
|
format_string = "{name}{@version}{/hash:7}{%compiler}"
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
|
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
|
||||||
deps = spack.store.STORE.db.installed_relatives(
|
deps = spack.store.STORE.db.installed_relatives(
|
||||||
@@ -68,15 +68,17 @@ def dependencies(parser, args):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
spec = specs[0]
|
spec = specs[0]
|
||||||
dependencies = spack.package_base.possible_dependencies(
|
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
|
||||||
spec,
|
spec,
|
||||||
transitive=args.transitive,
|
transitive=args.transitive,
|
||||||
expand_virtuals=args.expand_virtuals,
|
expand_virtuals=args.expand_virtuals,
|
||||||
depflag=args.deptype,
|
allowed_deps=args.deptype,
|
||||||
)
|
)
|
||||||
|
if not args.expand_virtuals:
|
||||||
|
dependencies.update(virtuals)
|
||||||
|
|
||||||
if spec.name in dependencies:
|
if spec.name in dependencies:
|
||||||
del dependencies[spec.name]
|
dependencies.remove(spec.name)
|
||||||
|
|
||||||
if dependencies:
|
if dependencies:
|
||||||
colify(sorted(dependencies))
|
colify(sorted(dependencies))
|
||||||
|
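Note: `spack dependencies` now queries the solver's input analysis instead of `spack.package_base.possible_dependencies`, and gets back plain sets of package names. A rough sketch of the new call, assuming `spack.deptypes.ALL` is a valid value for `allowed_deps` (the package name is illustrative, not from the diff):

    # Sketch only: mirrors the query `spack dependencies` makes above.
    import spack.deptypes as dt
    import spack.spec
    from spack.solver.input_analysis import create_graph_analyzer

    deps, virtuals, _ = create_graph_analyzer().possible_dependencies(
        spack.spec.Spec("mpileaks"),  # illustrative abstract spec
        transitive=True,              # follow dependencies of dependencies
        expand_virtuals=False,        # keep virtual packages unexpanded
        allowed_deps=dt.ALL,          # consider every dependency type (assumed flag)
    )
    deps.update(virtuals)             # what the command does when virtuals are not expanded
    print(sorted(deps))               # set semantics explain `dependencies.remove()` above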
@@ -93,7 +93,7 @@ def dependents(parser, args):
         env = ev.active_environment()
         spec = spack.cmd.disambiguate_spec(specs[0], env)

-        format_string = "{name}{@version}{%compiler}{/hash:7}"
+        format_string = "{name}{@version}{/hash:7}{%compiler}"
         if sys.stdout.isatty():
             tty.msg("Dependents of %s" % spec.cformat(format_string))
         deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)

@@ -18,6 +18,7 @@
 from llnl.util.symlink import symlink

 import spack.cmd
+import spack.concretize
 import spack.environment as ev
 import spack.installer
 import spack.store

@@ -103,7 +104,7 @@ def deprecate(parser, args):
     )

     if args.install:
-        deprecator = specs[1].concretized()
+        deprecator = spack.concretize.concretize_one(specs[1])
     else:
         deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)

@@ -10,6 +10,7 @@
 import spack.build_environment
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.concretize
 import spack.config
 import spack.repo
 from spack.cmd.common import arguments

@@ -113,8 +114,8 @@ def dev_build(self, args):
     source_path = os.path.abspath(source_path)

     # Forces the build to run out of the source directory.
-    spec.constrain("dev_path=%s" % source_path)
-    spec.concretize()
+    spec.constrain(f'dev_path="{source_path}"')
+    spec = spack.concretize.concretize_one(spec)

     if spec.installed:
         tty.error("Already installed in %s" % spec.prefix)

@@ -125,7 +125,7 @@ def develop(parser, args):
     version = spec.versions.concrete_range_as_version
     if not version:
         # look up the maximum version so infintiy versions are preferred for develop
-        version = max(spec.package_class.versions.keys())
+        version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
         tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
     spec.versions = spack.version.VersionList([version])

@@ -110,10 +110,7 @@ def external_find(args):
         # Note that KeyboardInterrupt does not subclass Exception
         # (so CTRL-C will terminate the program as expected).
         skip_msg = "Skipping manifest and continuing with other external checks"
-        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
-            errno.EPERM,
-            errno.EACCES,
-        ]:
+        if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
             # The manifest file does not have sufficient permissions enabled:
             # print a warning and keep going
             tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
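Note: since Python 3.3 `IOError` has been an alias of `OSError`, so collapsing the two `isinstance` checks above into a single `isinstance(e, OSError)` test is behavior-preserving.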
@@ -54,10 +54,6 @@
     @m{target=target}             specific <target> processor
     @m{arch=platform-os-target}   shortcut for all three above

-    cross-compiling:
-        @m{os=backend} or @m{os=be}  build for compute node (backend)
-        @m{os=frontend} or @m{os=fe} build for login node (frontend)
-
     dependencies:
         ^dependency [constraints] specify constraints on dependencies
         ^@K{/hash}                build with a specific installed

@@ -77,7 +73,7 @@
         boxlib @B{dim=2}          boxlib built for 2 dimensions
         libdwarf @g{%intel} ^libelf@g{%gcc}
             libdwarf, built with intel compiler, linked to libelf built with gcc
-        mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
+        mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
             mvapich2, built with gcc compiler, with support for multiple fabrics
 """

@@ -13,6 +13,7 @@
 from llnl.util import lang, tty

 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.environment as ev
 import spack.paths

@@ -450,7 +451,7 @@ def concrete_specs_from_file(args):
             else:
                 s = spack.spec.Spec.from_json(f)

-        concretized = s.concretized()
+        concretized = spack.concretize.concretize_one(s)
         if concretized.dag_hash() != s.dag_hash():
             msg = 'skipped invalid file "{0}". '
             msg += "The file does not contain a concrete spec."

@@ -7,9 +7,9 @@

 from llnl.path import convert_to_posix_path

+import spack.concretize
 import spack.paths
 import spack.util.executable
-from spack.spec import Spec

 description = "generate Windows installer"
 section = "admin"

@@ -65,8 +65,7 @@ def make_installer(parser, args):
     """
     if sys.platform == "win32":
         output_dir = args.output_dir
-        cmake_spec = Spec("cmake")
-        cmake_spec.concretize()
+        cmake_spec = spack.concretize.concretize_one("cmake")
         cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
         cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
         spack_source = args.spack_source

@@ -492,7 +492,7 @@ def extend_with_additional_versions(specs, num_versions):
         mirror_specs = spack.mirrors.utils.get_all_versions(specs)
     else:
         mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
-    mirror_specs = [x.concretized() for x in mirror_specs]
+    mirror_specs = [spack.concretize.concretize_one(x) for x in mirror_specs]
     return mirror_specs


@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
         package does not explicitly forbid redistributing source."""
         if self.private:
             return True
-        elif x.package_class.redistribute_source(x):
+        elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
             return True
         else:
             tty.debug(

@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""

 import collections
-import os.path
+import os
 import shutil
 import sys

@@ -383,8 +383,10 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
         query = " ".join(str(s) for s in args.constraint_specs)
         msg = f"the constraint '{query}' matches multiple packages:\n"
         for s in specs:
-            spec_fmt = "{hash:7} {name}{@version}{%compiler}"
-            spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
+            spec_fmt = (
+                "{hash:7} {name}{@version}{compiler_flags}{variants}"
+                "{arch=architecture} {%compiler}"
+            )
             msg += "\t" + s.cformat(spec_fmt) + "\n"
         tty.die(msg, "In this context exactly *one* match is needed.")

@@ -41,7 +41,11 @@ def providers(parser, args):
     specs = spack.cmd.parse_specs(args.virtual_package)

     # Check prerequisites
-    non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
+    non_virtual = [
+        str(s)
+        for s in specs
+        if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
+    ]
     if non_virtual:
         msg = "non-virtual specs cannot be part of the query "
         msg += "[{0}]\n".format(", ".join(non_virtual))
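Note: a recurring pattern in these hunks replaces attribute lookups on `Spec` with repository queries. A minimal sketch of the equivalence, with illustrative package names:

    # Sketch only: repo-based queries that replace Spec attributes in the hunks above.
    import spack.repo

    spack.repo.PATH.is_virtual("mpi")       # replaces: Spec("mpi").virtual
    spack.repo.PATH.get_pkg_class("zlib")   # replaces: Spec("zlib").package_class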
@@ -6,8 +6,9 @@
 import os
 import re
 import sys
-from itertools import zip_longest
-from typing import Dict, List, Optional
+import warnings
+from itertools import islice, zip_longest
+from typing import Callable, Dict, List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.color as color

@@ -16,6 +17,9 @@
 import spack.paths
 import spack.repo
 import spack.util.git
+import spack.util.spack_yaml
+from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
+from spack.tokenize import Token
 from spack.util.executable import Executable, which

 description = "runs source code style checks on spack"

@@ -198,6 +202,13 @@ def setup_parser(subparser):
         action="append",
         help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
     )
+    subparser.add_argument(
+        "--spec-strings",
+        action="store_true",
+        help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
+        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
+        "will be removed in Spack v1.0.",
+    )

     subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")

@@ -423,7 +434,8 @@ def _run_import_check(
             continue

         for m in is_abs_import.finditer(contents):
-            if contents.count(m.group(1)) == 1:
+            # Find at most two occurences: the first is the import itself, the second is its usage.
+            if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
                 to_remove.append(m.group(0))
                 exit_code = 1
                 print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)

@@ -438,7 +450,7 @@ def _run_import_check(
             module = _module_part(root, m.group(0))
             if not module or module in to_add:
                 continue
-            if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
+            if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
                 continue
             to_add.add(module)
             exit_code = 1

@@ -506,7 +518,196 @@ def _bootstrap_dev_dependencies():
     spack.bootstrap.ensure_environment_dependencies()


+IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
+
+
+def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
+    # only move the compiler to the back if it exists and is not already at the end
+    if not 0 <= idx < len(blocks) - 1:
+        return
+    # if there's only whitespace after the compiler, don't move it
+    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
+        return
+    # rotate left and always add at least one WS token between compiler and previous token
+    compiler_block = blocks.pop(idx)
+    if compiler_block[0].kind != SpecTokens.WS:
+        compiler_block.insert(0, Token(SpecTokens.WS, " "))
+    # delete the WS tokens from the new first block if it was at the very start, to prevent leading
+    # WS tokens.
+    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
+        blocks[0].pop(0)
+    blocks.append(compiler_block)
+
+
+def _spec_str_format(spec_str: str) -> Optional[str]:
+    """Given any string, try to parse as spec string, and rotate the compiler token to the end
+    of each spec instance. Returns the formatted string if it was changed, otherwise None."""
+    # We parse blocks of tokens that include leading whitespace, and move the compiler block to
+    # the end when we hit a dependency ^... or the end of a string.
+    # [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
+    # [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
+
+    current_block: List[Token] = []
+    blocks: List[List[Token]] = []
+    compiler_block_idx = -1
+    in_edge_attr = False
+
+    for token in SPEC_TOKENIZER.tokenize(spec_str):
+        if token.kind == SpecTokens.UNEXPECTED:
+            # parsing error, we cannot fix this string.
+            return None
+        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
+            # multiple compilers are not supported in Spack v0.x, so early return
+            if compiler_block_idx != -1:
+                return None
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+            compiler_block_idx = len(blocks) - 1
+        elif token.kind in (
+            SpecTokens.START_EDGE_PROPERTIES,
+            SpecTokens.DEPENDENCY,
+            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
+            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
+        ):
+            _spec_str_reorder_compiler(compiler_block_idx, blocks)
+            compiler_block_idx = -1
+            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
+                in_edge_attr = True
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
+            in_edge_attr = False
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif in_edge_attr:
+            current_block.append(token)
+        elif token.kind in (
+            SpecTokens.VERSION_HASH_PAIR,
+            SpecTokens.GIT_VERSION,
+            SpecTokens.VERSION,
+            SpecTokens.PROPAGATED_BOOL_VARIANT,
+            SpecTokens.BOOL_VARIANT,
+            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
+            SpecTokens.KEY_VALUE_PAIR,
+            SpecTokens.DAG_HASH,
+        ):
+            current_block.append(token)
+            blocks.append(current_block)
+            current_block = []
+        elif token.kind == SpecTokens.WS:
+            current_block.append(token)
+        else:
+            raise ValueError(f"unexpected token {token}")
+
+    if current_block:
+        blocks.append(current_block)
+    _spec_str_reorder_compiler(compiler_block_idx, blocks)
+
+    new_spec_str = "".join(token.value for block in blocks for token in block)
+    return new_spec_str if spec_str != new_spec_str else None
+
+
+SpecStrHandler = Callable[[str, int, int, str, str], None]
+
+
+def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
+    """A SpecStrHandler that prints formatted spec strings and their locations."""
+    print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
+
+
+def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
+    """A SpecStrHandler that updates formatted spec strings in files."""
+    with open(path, "r", encoding="utf-8") as f:
+        lines = f.readlines()
+    new_line = lines[line - 1].replace(old, new)
+    if new_line == lines[line - 1]:
+        tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
+        return
+    lines[line - 1] = new_line
+    print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
+    with open(path, "w", encoding="utf-8") as f:
+        f.writelines(lines)
+
+
+def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
+    """Walk the AST of a Python file and apply handler to formatted spec strings."""
+    has_constant = sys.version_info >= (3, 8)
+    for node in ast.walk(tree):
+        if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
+            current_str = node.value
+        elif not has_constant and isinstance(node, ast.Str):
+            current_str = node.s
+        else:
+            continue
+        if not IS_PROBABLY_COMPILER.search(current_str):
+            continue
+        new = _spec_str_format(current_str)
+        if new is not None:
+            handler(path, node.lineno, node.col_offset, current_str, new)
+
+
+def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
+    """Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
+    queue = [data]
+    seen = set()
+
+    while queue:
+        current = queue.pop(0)
+        if id(current) in seen:
+            continue
+        seen.add(id(current))
+        if isinstance(current, dict):
+            queue.extend(current.values())
+            queue.extend(current.keys())
+        elif isinstance(current, list):
+            queue.extend(current)
+        elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
+            new = _spec_str_format(current)
+            if new is not None:
+                mark = getattr(current, "_start_mark", None)
+                if mark:
+                    line, col = mark.line + 1, mark.column + 1
+                else:
+                    line, col = 0, 0
+                handler(path, line, col, current, new)
+
+
+def _check_spec_strings(
+    paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
+) -> None:
+    """Open Python, JSON and YAML files, and format their string literals that look like spec
+    strings. A handler is called for each formatting, which can be used to print or apply fixes."""
+    for path in paths:
+        is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
+        is_python = path.endswith(".py")
+        if not is_json_or_yaml and not is_python:
+            continue
+
+        try:
+            with open(path, "r", encoding="utf-8") as f:
+                # skip files that are likely too large to be user code or config
+                if os.fstat(f.fileno()).st_size > 1024 * 1024:
+                    warnings.warn(f"skipping {path}: too large.")
+                    continue
+                if is_json_or_yaml:
+                    _spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
+                elif is_python:
+                    _spec_str_ast(path, ast.parse(f.read()), handler)
+        except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
+            warnings.warn(f"skipping {path}")
+            continue
+
+
 def style(parser, args):
+    if args.spec_strings:
+        if not args.files:
+            tty.die("No files provided to check spec strings.")
+        handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
+        return _check_spec_strings(args.files, handler)
+
     # save initial working directory for relativizing paths later
     args.initial_working_dir = os.getcwd()
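Note: a minimal sketch of what the new `--spec-strings` mode rewrites, assuming the helpers above live in `spack.cmd.style` (the module path and the example specs are assumptions, not taken from the diff):

    # Sketch only: _spec_str_format rotates the compiler token to the end of a spec.
    from spack.cmd.style import _spec_str_format  # assumed module path

    assert _spec_str_format("pkg@3.1 +foo %gcc@3.1 +bar") == "pkg@3.1 +foo +bar %gcc@3.1"
    assert _spec_str_format("pkg +foo %gcc") is None  # compiler already last; unchanged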
@@ -177,16 +177,15 @@ def test_run(args):
         matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
-            """
-            TODO: Need to write out a log message and/or CDASH Testing
-            output that package not installed IF continue to process
-            these issues here.

-            if args.log_format:
-                # Proceed with the spec assuming the test process
-                # to ensure report package as skipped (e.g., for CI)
-                specs_to_test.append(spec)
-            """
+            # TODO: Need to write out a log message and/or CDASH Testing
+            # output that package not installed IF continue to process
+            # these issues here.
+
+            # if args.log_format:
+            #     # Proceed with the spec assuming the test process
+            #     # to ensure report package as skipped (e.g., for CI)
+            #     specs_to_test.append(spec)

         specs_to_test.extend(matching)

@@ -253,7 +252,9 @@ def has_test_and_tags(pkg_class):
         hashes = env.all_hashes() if env else None

         specs = spack.store.STORE.db.query(hashes=hashes)
-        specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
+        specs = list(
+            filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
+        )

     spack.cmd.display_specs(specs, long=True)

@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import shutil

 import llnl.util.tty as tty

@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os.path
+import os
 import re
 import sys

@@ -216,7 +216,7 @@ def unit_test(parser, args, unknown_args):
     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
     with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_core_dependencies()
+        spack.bootstrap.ensure_clingo_importable_or_raise()
         if pytest is None:
             spack.bootstrap.ensure_environment_dependencies()
             import pytest

@@ -2,35 +2,48 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
+import io
+from typing import List, Optional

 import llnl.util.tty as tty
+from llnl.string import plural
+from llnl.util.filesystem import visit_directory_tree

 import spack.cmd
 import spack.environment as ev
+import spack.spec
 import spack.store
 import spack.verify
+import spack.verify_libraries
+from spack.cmd.common import arguments

-description = "check that all spack packages are on disk as installed"
+description = "verify spack installations on disk"
 section = "admin"
 level = "long"

+MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
+

-def setup_parser(subparser):
-    setup_parser.parser = subparser
-
-    subparser.add_argument(
+def setup_parser(subparser: argparse.ArgumentParser):
+    global MANIFEST_SUBPARSER
+    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
+
+    MANIFEST_SUBPARSER = sp.add_parser(
+        "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
+    )
+    MANIFEST_SUBPARSER.add_argument(
         "-l", "--local", action="store_true", help="verify only locally installed packages"
     )
-    subparser.add_argument(
+    MANIFEST_SUBPARSER.add_argument(
         "-j", "--json", action="store_true", help="ouptut json-formatted errors"
     )
-    subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
-    subparser.add_argument(
+    MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
+    MANIFEST_SUBPARSER.add_argument(
         "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
     )

-    type = subparser.add_mutually_exclusive_group()
-    type.add_argument(
+    manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
+    manifest_sp_type.add_argument(
         "-s",
         "--specs",
         action="store_const",

@@ -39,7 +52,7 @@ def setup_parser(subparser):
         default="specs",
         help="treat entries as specs (default)",
     )
-    type.add_argument(
+    manifest_sp_type.add_argument(
         "-f",
         "--files",
         action="store_const",

@@ -49,14 +62,67 @@ def setup_parser(subparser):
         help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )

+    libraries_subparser = sp.add_parser(
+        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
+    )
+
+    arguments.add_common_arguments(libraries_subparser, ["constraint"])
+
+
 def verify(parser, args):
+    cmd = args.verify_command
+    if cmd == "libraries":
+        return verify_libraries(args)
+    elif cmd == "manifest":
+        return verify_manifest(args)
+    parser.error("invalid verify subcommand")
+
+
+def verify_libraries(args):
+    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
+    specs_from_db = [s for s in args.specs(installed=True) if not s.external]
+
+    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
+
+    errors = 0
+    for spec in specs_from_db:
+        try:
+            pkg = spec.package
+        except Exception:
+            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
+        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
+        if error_msg is not None:
+            errors += 1
+            tty.error(error_msg)
+
+    if errors:
+        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
+        return 1
+
+
+def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
+    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
+    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
+        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
+    )
+    visit_directory_tree(spec.prefix, visitor)
+
+    if not visitor.problems:
+        return None
+
+    output = io.StringIO()
+    visitor.write(output, indent=4, brief=True)
+    message = output.getvalue().rstrip()
+    return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
+
+
+def verify_manifest(args):
+    """verify that install directories have not been modified since installation"""
     local = args.local

     if args.type == "files":
         if args.all:
-            setup_parser.parser.print_help()
-            return 1
+            MANIFEST_SUBPARSER.error("cannot use --all with --files")

         for file in args.specs_or_files:
             results = spack.verify.check_file_manifest(file)
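Note: with the parser restructured above, verification now goes through subcommands. Judging only from these argparse definitions, the old manifest check becomes `spack verify manifest --all` (or with specs/files), and the new shared-library check is `spack verify libraries`, which takes the common constraint arguments.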
@@ -87,8 +153,7 @@ def verify(parser, args):
         env = ev.active_environment()
         specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
     else:
-        setup_parser.parser.print_help()
-        return 1
+        MANIFEST_SUBPARSER.error("use --all or specify specs to verify")

     for spec in specs:
         tty.debug("Verifying package %s")

@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target")
+        operating_system = host_platform.default_operating_system()
+        target = host_platform.default_target()
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().target("default_target")
+            target = spack.platforms.host().default_target()

         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
+            operating_system = host_platform.default_operating_system()
     return operating_system, target


@@ -37,13 +37,12 @@ def enable_compiler_existence_check():

 SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
-SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]


-def concretize_specs_together(
-    abstract_specs: Sequence[SpecLike], tests: TestsType = False
-) -> Sequence[Spec]:
+def _concretize_specs_together(
+    abstract_specs: Sequence[Spec], tests: TestsType = False
+) -> List[Spec]:
     """Given a number of specs as input, tries to concretize them together.

     Args:

@@ -51,11 +50,10 @@ def concretize_specs_together(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     allow_deprecated = spack.config.get("config:deprecated", False)
-    solver = spack.solver.asp.Solver()
-    result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
+    result = Solver().solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
     return [s.copy() for s in result.specs]


@@ -72,7 +70,7 @@ def concretize_together(
     """
     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     abstract_specs = [abstract for abstract, _ in spec_list]
-    concrete_specs = concretize_specs_together(to_concretize, tests=tests)
+    concrete_specs = _concretize_specs_together(to_concretize, tests=tests)
     return list(zip(abstract_specs, concrete_specs))


@@ -90,7 +88,7 @@ def concretize_together_when_possible(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     old_concrete_to_abstract = {

@@ -98,9 +96,8 @@ def concretize_together_when_possible(
     }

     result_by_user_spec = {}
-    solver = spack.solver.asp.Solver()
     allow_deprecated = spack.config.get("config:deprecated", False)
-    for result in solver.solve_in_rounds(
+    for result in Solver().solve_in_rounds(
         to_concretize, tests=tests, allow_deprecated=allow_deprecated
     ):
         result_by_user_spec.update(result.specs_by_input)

@@ -124,7 +121,7 @@ def concretize_separately(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.bootstrap
+    from spack.bootstrap import ensure_bootstrap_configuration, ensure_clingo_importable_or_raise

     to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
     args = [

@@ -134,8 +131,8 @@ def concretize_separately(
     ]
     ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
     # Ensure we don't try to bootstrap clingo in parallel
-    with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_clingo_importable_or_raise()
+    with ensure_bootstrap_configuration():
+        ensure_clingo_importable_or_raise()

         # Ensure all the indexes have been built or updated, since
         # otherwise the processes in the pool may timeout on waiting

@@ -190,10 +187,52 @@ def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int,
     index, spec_str, tests = packed_arguments
     with tty.SuppressOutput(msg_enabled=False):
         start = time.time()
-        spec = Spec(spec_str).concretized(tests=tests)
+        spec = concretize_one(Spec(spec_str), tests=tests)
         return index, spec, time.time() - start


+def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
+    """Return a concretized copy of the given spec.
+
+    Args:
+        tests: if False disregard 'test' dependencies, if a list of names activate them for
+            the packages in the list, if True activate 'test' dependencies for all packages.
+    """
+    from spack.solver.asp import Solver, SpecBuilder
+
+    if isinstance(spec, str):
+        spec = Spec(spec)
+    spec = spec.lookup_hash()
+
+    if spec.concrete:
+        return spec.copy()
+
+    for node in spec.traverse():
+        if not node.name:
+            raise spack.error.SpecError(
+                f"Spec {node} has no name; cannot concretize an anonymous spec"
+            )
+
+    allow_deprecated = spack.config.get("config:deprecated", False)
+    result = Solver().solve([spec], tests=tests, allow_deprecated=allow_deprecated)
+
+    # take the best answer
+    opt, i, answer = min(result.answers)
+    name = spec.name
+    # TODO: Consolidate this code with similar code in solve.py
+    if spack.repo.PATH.is_virtual(spec.name):
+        providers = [s.name for s in answer.values() if s.package.provides(name)]
+        name = providers[0]
+
+    node = SpecBuilder.make_node(pkg=name)
+    assert (
+        node in answer
+    ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
+
+    concretized = answer[node]
+    return concretized
+
+
 class UnavailableCompilerVersionError(spack.error.SpackError):
     """Raised when there is no available compiler that satisfies a
     compiler spec."""
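Note: this hunk adds the module-level `concretize_one` that the command-side hunks above migrate to. A minimal sketch of the replacement pattern (the spec string is illustrative):

    # Sketch only: the migration applied throughout the command modules above.
    import spack.concretize

    # before: spack.spec.Spec("cmake").concretized()
    concrete = spack.concretize.concretize_one("cmake")  # accepts a str or a Spec
    assert concrete.concrete  # returns a concrete copy of the input spec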
@@ -32,15 +32,17 @@
|
|||||||
import copy
|
import copy
|
||||||
import functools
|
import functools
|
||||||
import os
|
import os
|
||||||
|
import os.path
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
|
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
|
||||||
from llnl.util import filesystem, lang, tty
|
from llnl.util import filesystem, lang, tty
|
||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.platforms
|
|
||||||
import spack.schema
|
import spack.schema
|
||||||
import spack.schema.bootstrap
|
import spack.schema.bootstrap
|
||||||
import spack.schema.cdash
|
import spack.schema.cdash
|
||||||
@@ -51,23 +53,29 @@
|
|||||||
import spack.schema.definitions
|
import spack.schema.definitions
|
||||||
import spack.schema.develop
|
import spack.schema.develop
|
||||||
import spack.schema.env
|
import spack.schema.env
|
||||||
|
import spack.schema.env_vars
|
||||||
|
import spack.schema.include
|
||||||
|
import spack.schema.merged
|
||||||
import spack.schema.mirrors
|
import spack.schema.mirrors
|
||||||
import spack.schema.modules
|
import spack.schema.modules
|
||||||
import spack.schema.packages
|
import spack.schema.packages
|
||||||
import spack.schema.repos
|
import spack.schema.repos
|
||||||
import spack.schema.upstreams
|
import spack.schema.upstreams
|
||||||
import spack.schema.view
|
import spack.schema.view
|
||||||
|
import spack.util.remote_file_cache as rfc_util
|
||||||
# Hacked yaml for configuration files preserves line numbers.
|
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
import spack.util.web as web_util
|
|
||||||
from spack.util.cpus import cpus_available
|
from spack.util.cpus import cpus_available
|
||||||
|
from spack.util.spack_yaml import get_mark_from_yaml_data
|
||||||
|
|
||||||
|
from .enums import ConfigScopePriority
|
||||||
|
|
||||||
#: Dict from section names -> schema for that section
|
#: Dict from section names -> schema for that section
|
||||||
SECTION_SCHEMAS: Dict[str, Any] = {
|
SECTION_SCHEMAS: Dict[str, Any] = {
|
||||||
"compilers": spack.schema.compilers.schema,
|
"compilers": spack.schema.compilers.schema,
|
||||||
"concretizer": spack.schema.concretizer.schema,
|
"concretizer": spack.schema.concretizer.schema,
|
||||||
"definitions": spack.schema.definitions.schema,
|
"definitions": spack.schema.definitions.schema,
|
||||||
|
"env_vars": spack.schema.env_vars.schema,
|
||||||
|
"include": spack.schema.include.schema,
|
||||||
"view": spack.schema.view.schema,
|
"view": spack.schema.view.schema,
|
||||||
"develop": spack.schema.develop.schema,
|
"develop": spack.schema.develop.schema,
|
||||||
"mirrors": spack.schema.mirrors.schema,
|
"mirrors": spack.schema.mirrors.schema,
|
||||||
@@ -115,6 +123,17 @@
|
|||||||
#: Type used for raw YAML configuration
|
#: Type used for raw YAML configuration
|
||||||
YamlConfigDict = Dict[str, Any]
|
YamlConfigDict = Dict[str, Any]
|
||||||
|
|
||||||
|
#: prefix for name of included configuration scopes
|
||||||
|
INCLUDE_SCOPE_PREFIX = "include"
|
||||||
|
|
||||||
|
#: safeguard for recursive includes -- maximum include depth
|
||||||
|
MAX_RECURSIVE_INCLUDES = 100
|
||||||
|
|
||||||
|
|
||||||
|
def _include_cache_location():
|
||||||
|
"""Location to cache included configuration files."""
|
||||||
|
return os.path.join(spack.paths.user_cache_path, "includes")
|
||||||
|
|
||||||
|
|
||||||
class ConfigScope:
|
class ConfigScope:
|
||||||
def __init__(self, name: str) -> None:
|
def __init__(self, name: str) -> None:
|
||||||
@@ -122,6 +141,25 @@ def __init__(self, name: str) -> None:
|
|||||||
self.writable = False
|
self.writable = False
|
||||||
self.sections = syaml.syaml_dict()
|
self.sections = syaml.syaml_dict()
|
||||||
|
|
||||||
|
#: names of any included scopes
|
||||||
|
self._included_scopes: Optional[List["ConfigScope"]] = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def included_scopes(self) -> List["ConfigScope"]:
|
||||||
|
"""Memoized list of included scopes, in the order they appear in this scope."""
|
||||||
|
if self._included_scopes is None:
|
||||||
|
self._included_scopes = []
|
||||||
|
|
||||||
|
includes = self.get_section("include")
|
||||||
|
if includes:
|
||||||
|
include_paths = [included_path(data) for data in includes["include"]]
|
||||||
|
for path in include_paths:
|
||||||
|
included_scope = include_path_scope(path)
|
||||||
|
if included_scope:
|
||||||
|
self._included_scopes.append(included_scope)
|
||||||
|
|
||||||
|
return self._included_scopes
|
||||||
|
|
||||||
def get_section_filename(self, section: str) -> str:
|
def get_section_filename(self, section: str) -> str:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@@ -404,26 +442,18 @@ def _method(self, *args, **kwargs):
|
|||||||
return _method
|
return _method
|
||||||
|
|
||||||
|
|
||||||
class Configuration:
|
ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
|
||||||
"""A full Spack configuration, from a hierarchy of config files.
|
ScopeWithPriority = Tuple[int, ConfigScope]
|
||||||
|
|
||||||
This class makes it easy to add a new scope on top of an existing one.
|
|
||||||
"""
|
class Configuration:
|
||||||
|
"""A hierarchical configuration, merging a number of scopes at different priorities."""
|
||||||
|
|
||||||
# convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
|
# convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
|
||||||
scopes: Dict[str, ConfigScope]
|
scopes: lang.PriorityOrderedMapping[str, ConfigScope]
|
||||||
|
|
||||||
def __init__(self, *scopes: ConfigScope) -> None:
|
def __init__(self) -> None:
|
||||||
"""Initialize a configuration with an initial list of scopes.
|
self.scopes = lang.PriorityOrderedMapping()
|
||||||
|
|
||||||
Args:
|
|
||||||
scopes: list of scopes to add to this
|
|
||||||
Configuration, ordered from lowest to highest precedence
|
|
||||||
|
|
||||||
"""
|
|
||||||
self.scopes = collections.OrderedDict()
|
|
||||||
for scope in scopes:
|
|
||||||
self.push_scope(scope)
|
|
||||||
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
|
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
|
||||||
|
|
||||||
def ensure_unwrapped(self) -> "Configuration":
|
def ensure_unwrapped(self) -> "Configuration":
|
||||||
@@ -431,36 +461,59 @@ def ensure_unwrapped(self) -> "Configuration":
         return self
 
     def highest(self) -> ConfigScope:
-        """Scope with highest precedence"""
-        return next(reversed(self.scopes.values()))  # type: ignore
+        """Scope with the highest precedence"""
+        return next(self.scopes.reversed_values())  # type: ignore
 
     @_config_mutator
-    def ensure_scope_ordering(self):
-        """Ensure that scope order matches documented precedent"""
-        # FIXME: We also need to consider that custom configurations and other orderings
-        # may not be preserved correctly
-        if "command_line" in self.scopes:
-            # TODO (when dropping python 3.6): self.scopes.move_to_end
-            self.scopes["command_line"] = self.remove_scope("command_line")
-
-    @_config_mutator
-    def push_scope(self, scope: ConfigScope) -> None:
-        """Add a higher precedence scope to the Configuration."""
-        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
-        self.scopes[scope.name] = scope
-
-    @_config_mutator
-    def pop_scope(self) -> ConfigScope:
-        """Remove the highest precedence scope and return it."""
-        name, scope = self.scopes.popitem(last=True)  # type: ignore[call-arg]
-        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
-        return scope
+    def push_scope(
+        self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
+    ) -> None:
+        """Adds a scope to the Configuration, at a given priority.
+
+        If a priority is not given, it is assumed to be the current highest priority.
+
+        Args:
+            scope: scope to be added
+            priority: priority of the scope
+        """
+        # TODO: As a follow on to #48784, change this to create a graph of the
+        # TODO: includes AND ensure properly sorted such that the order included
+        # TODO: at the highest level is reflected in the value of an option that
+        # TODO: is set in multiple included files.
+        # before pushing the scope itself, push any included scopes recursively, at same priority
+        for included_scope in reversed(scope.included_scopes):
+            if _depth + 1 > MAX_RECURSIVE_INCLUDES:  # make sure we're not recursing endlessly
+                mark = ""
+                if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
+                    mark = included_scope.path._start_mark  # type: ignore
+                raise RecursiveIncludeError(
+                    f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
+                )
+
+            # record this inclusion so that remove_scope() can use it
+            self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
+
+        tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
+        self.scopes.add(scope.name, value=scope, priority=priority)
 
     @_config_mutator
     def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
-        """Remove scope by name; has no effect when ``scope_name`` does not exist"""
-        scope = self.scopes.pop(scope_name, None)
-        tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
+        """Removes a scope by name, and returns it. If the scope does not exist, returns None."""
+        try:
+            scope = self.scopes.remove(scope_name)
+            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
+        except KeyError as e:
+            tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
+            return None
+
+        # transitively remove included scopes
+        for included_scope in scope.included_scopes:
+            assert (
+                included_scope.name in self.scopes
+            ), f"Included scope '{included_scope.name}' was never added to configuration!"
+            self.remove_scope(included_scope.name)
+
         return scope
 
     @property
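The push/remove pair above forms a contract: pushing a scope first pushes everything it includes (so the including scope ends up on top), and removing it removes those includes transitively. A simplified, runnable sketch of that contract (priorities and debug output omitted; `DummyScope` and `ToyConfiguration` are invented names for illustration):

    from typing import Dict, List, Optional

    class DummyScope:
        """Stand-in for a ConfigScope: a name plus the scopes it includes."""

        def __init__(self, name: str, included_scopes: Optional[List["DummyScope"]] = None):
            self.name = name
            self.included_scopes = included_scopes or []

    MAX_DEPTH = 100  # stands in for MAX_RECURSIVE_INCLUDES

    class ToyConfiguration:
        def __init__(self) -> None:
            self.scopes: Dict[str, DummyScope] = {}

        def push_scope(self, scope: DummyScope, _depth: int = 0) -> None:
            if _depth > MAX_DEPTH:
                raise RecursionError(f"too many nested includes under {scope.name}")
            # includes go in first, so the including scope lands on top of them
            for included in reversed(scope.included_scopes):
                self.push_scope(included, _depth=_depth + 1)
            self.scopes[scope.name] = scope

        def remove_scope(self, name: str) -> Optional[DummyScope]:
            scope = self.scopes.pop(name, None)
            if scope is None:
                return None
            for included in scope.included_scopes:  # transitive removal, as in the diff
                self.remove_scope(included.name)
            return scope

    inner = DummyScope("included:inner")
    outer = DummyScope("outer", [inner])
    cfg = ToyConfiguration()
    cfg.push_scope(outer)
    assert list(cfg.scopes) == ["included:inner", "outer"]
    cfg.remove_scope("outer")
    assert not cfg.scopes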
@@ -469,15 +522,13 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
         return (s for s in self.scopes.values() if s.writable)
 
     def highest_precedence_scope(self) -> ConfigScope:
-        """Writable scope with highest precedence."""
-        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore
+        """Writable scope with the highest precedence."""
+        return next(s for s in self.scopes.reversed_values() if s.writable)
 
     def highest_precedence_non_platform_scope(self) -> ConfigScope:
-        """Writable non-platform scope with highest precedence"""
+        """Writable non-platform scope with the highest precedence"""
         return next(
-            s
-            for s in reversed(self.scopes.values())  # type: ignore
-            if s.writable and not s.is_platform_dependent
+            s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
         )
 
     def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -744,7 +795,7 @@ def override(
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        CONFIG.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope, priority=None)
    else:
        base_name = _OVERRIDES_BASE_NAME
        # Ensure the new override gets a unique scope name
@@ -758,7 +809,7 @@ def override(
                break
 
        overrides = InternalConfigScope(scope_name)
-        CONFIG.push_scope(overrides)
+        CONFIG.push_scope(overrides, priority=None)
        CONFIG.set(path_or_scope, value, scope=scope_name)
 
    try:
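Both call sites above now pass `priority=None`, i.e. the override scope lands at the current highest priority. For orientation, `override` is used as a context manager; a typical usage, hedged against the existing `spack.config.override` API:

    import spack.config

    # Temporarily pin a value; the override scope is pushed on entry at the
    # highest current priority and removed again when the block exits.
    with spack.config.override("config:build_jobs", 1):
        assert spack.config.get("config:build_jobs") == 1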
@@ -768,13 +819,86 @@ def override(
    assert scope is overrides
 
 
-def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
+def _add_platform_scope(
+    cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
+) -> None:
     """Add a platform-specific subdirectory for the current platform."""
+    import spack.platforms  # circular dependency
+
     platform = spack.platforms.host().name
     scope = DirectoryConfigScope(
         f"{name}/{platform}", os.path.join(path, platform), writable=writable
     )
-    cfg.push_scope(scope)
+    cfg.push_scope(scope, priority=priority)
+
+
+#: An include path plus the conditions for its relevance: an optional, limited
+#: python expression that evaluates to a boolean, and/or an explicit marking
+#: as optional.
+class IncludePath(NamedTuple):
+    path: str
+    when: str
+    sha256: str
+    optional: bool
+
+
+def included_path(entry: Union[str, dict]) -> IncludePath:
+    """Convert the included path entry into an IncludePath.
+
+    Args:
+        entry: include configuration entry
+
+    Returns: converted entry, where an empty ``when`` means the path is
+        not conditionally included
+    """
+    if isinstance(entry, str):
+        return IncludePath(path=entry, sha256="", when="", optional=False)
+
+    path = entry["path"]
+    sha256 = entry.get("sha256", "")
+    when = entry.get("when", "")
+    optional = entry.get("optional", False)
+    return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
+
+
+def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
+    """Instantiate an appropriate configuration scope for the given path.
+
+    Args:
+        include: optional include path
+
+    Returns: configuration scope
+
+    Raises:
+        ValueError: included path has an unsupported URL scheme, is required
+            but does not exist; configuration stage directory argument is missing
+        ConfigFileError: unable to access remote configuration file(s)
+    """
+    # circular dependencies
+    import spack.spec
+
+    if (not include.when) or spack.spec.eval_conditional(include.when):
+        config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
+        if not config_path:
+            raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
+
+        if os.path.isdir(config_path):
+            # directories are treated as regular ConfigScopes
+            config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
+            tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
+            return DirectoryConfigScope(config_name, config_path)
+
+        if os.path.exists(config_path):
+            # files are assumed to be SingleFileScopes
+            config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
+            tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
+            return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
+
+        if not include.optional:
+            path = f" at ({config_path})" if config_path != include.path else ""
+            raise ValueError(f"Required path ({include.path}) does not exist{path}")
+
+    return None
+
+
 def config_paths_from_entry_points() -> List[Tuple[str, str]]:
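`included_path` accepts either a bare string or a mapping; both forms normalize to the same NamedTuple. A quick illustration using the definitions added above (paths and checksum are made up; the import location is assumed from this diff):

    from spack.config import included_path

    print(included_path("~/.spack/extra.yaml"))
    # IncludePath(path='~/.spack/extra.yaml', when='', sha256='', optional=False)

    entry = {"path": "https://example.com/cfg.yaml", "sha256": "abc123", "optional": True}
    print(included_path(entry))
    # IncludePath(path='https://example.com/cfg.yaml', when='', sha256='abc123', optional=True)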
@@ -802,18 +926,17 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
    return config_paths
 
 
-def create() -> Configuration:
+def create_incremental() -> Generator[Configuration, None, None]:
    """Singleton Configuration instance.
 
    This constructs one instance associated with this module and returns
    it. It is bundled inside a function so that configuration can be
    initialized lazily.
    """
-    cfg = Configuration()
-
    # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
-    cfg.push_scope(builtin)
+    cfg = create_from(
+        (ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
+    )
 
    # Builtin paths to configuration files in Spack
    configuration_paths = [
@@ -843,16 +966,29 @@ def create() -> Configuration:
 
    # add each scope and its platform-specific directory
    for name, path in configuration_paths:
-        cfg.push_scope(DirectoryConfigScope(name, path))
-
-        # Each scope can have per-platfom overrides in subdirectories
-        _add_platform_scope(cfg, name, path)
+        cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
+        # Each scope can have per-platform overrides in subdirectories
+        _add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
 
-    return cfg
+    # yield the config incrementally so that each config level's init code can get
+    # data from the one below. This can be tricky, but it enables us to have a
+    # single unified config system.
+    #
+    # TODO: think about whether we want to restrict what types of config can be used
+    # at each level. e.g., we may want to just more forcibly disallow remote
+    # config (which uses ssl and other config options) for some of the scopes,
+    # to make the bootstrap issues more explicit, even if allowing config scope
+    # init to reference lower scopes is more flexible.
+    yield cfg
+
+
+def create() -> Configuration:
+    """Create a configuration using create_incremental(), return the last yielded result."""
+    return list(create_incremental())[-1]
 
 
 #: This is the singleton configuration instance for Spack.
-CONFIG: Configuration = lang.Singleton(create)  # type: ignore
+CONFIG: Configuration = lang.Singleton(create_incremental)  # type: ignore
 
 
 def add_from_file(filename: str, scope: Optional[str] = None) -> None:
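`create_incremental` turns configuration construction into a generator so callers can observe the configuration after each level is assembled; `create()` simply drains it and keeps the last value. A hypothetical consumption sketch (import location assumed from this diff):

    from spack.config import create_incremental

    cfg = None
    for cfg in create_incremental():
        # `cfg` already merges every scope pushed so far; init code for the next
        # level can therefore read settings from the levels below it.
        pass
    # `cfg` is now the fully assembled Configuration, i.e. what create() returns.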
@@ -948,10 +1084,11 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
 
    Accepts the path syntax described in ``get()``.
    """
-    return CONFIG.set(path, value, scope)
+    result = CONFIG.set(path, value, scope)
+    return result
 
 
-def scopes() -> Dict[str, ConfigScope]:
+def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
    """Convenience function to get list of configuration scopes."""
    return CONFIG.scopes
 
@@ -1048,8 +1185,6 @@ def validate(
    This leverages the line information (start_mark, end_mark) stored
    on Spack YAML structures.
    """
-    import jsonschema
-
    try:
        spack.schema.Validator(schema).validate(data)
    except jsonschema.ValidationError as e:
@@ -1407,7 +1542,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
 
 @contextlib.contextmanager
 def use_configuration(
-    *scopes_or_paths: Union[ConfigScope, str]
+    *scopes_or_paths: Union[ScopeWithOptionalPriority, str]
 ) -> Generator[Configuration, None, None]:
    """Use the configuration scopes passed as arguments within the context manager.
 
@@ -1422,7 +1557,7 @@ def use_configuration(
    global CONFIG
 
    # Normalize input and construct a Configuration object
-    configuration = _config_from(scopes_or_paths)
+    configuration = create_from(*scopes_or_paths)
    CONFIG.clear_caches(), configuration.clear_caches()
 
    saved_config, CONFIG = CONFIG, configuration
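With the widened signature, `use_configuration` accepts plain scopes, directory paths, and `(priority, scope)` tuples alike. A sketch under the diff's assumptions (`/tmp/my-config` is a hypothetical existing directory of YAML config files; import locations assumed):

    from spack.config import InternalConfigScope, use_configuration

    scope = InternalConfigScope("test", {"config": {"build_jobs": 2}})
    with use_configuration(scope, "/tmp/my-config") as cfg:
        # the bare scope gets the default CONFIG_FILES priority; the path is
        # normalized into a DirectoryConfigScope at the same priority
        assert cfg.get("config:build_jobs") == 2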
@@ -1433,137 +1568,44 @@ def use_configuration(
        CONFIG = saved_config
 
 
+def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
+    if isinstance(entry, tuple):
+        return entry
+
+    default_priority = ConfigScopePriority.CONFIG_FILES
+    if isinstance(entry, ConfigScope):
+        return default_priority, entry
+
+    # Otherwise we need to construct it
+    path = os.path.normpath(entry)
+    assert os.path.isdir(path), f'"{path}" must be a directory'
+    name = os.path.basename(path)
+    return default_priority, DirectoryConfigScope(name, path)
+
+
 @lang.memoized
-def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
-    scopes = []
-    for scope_or_path in scopes_or_paths:
-        # If we have a config scope we are already done
-        if isinstance(scope_or_path, ConfigScope):
-            scopes.append(scope_or_path)
-            continue
-
-        # Otherwise we need to construct it
-        path = os.path.normpath(scope_or_path)
-        assert os.path.isdir(path), f'"{path}" must be a directory'
-        name = os.path.basename(path)
-        scopes.append(DirectoryConfigScope(name, path))
-
-    configuration = Configuration(*scopes)
-    return configuration
-
-
-def raw_github_gitlab_url(url: str) -> str:
-    """Transform a github URL to the raw form to avoid undesirable html.
+def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
+    """Creates a configuration object from the scopes passed in input.
 
    Args:
-        url: url to be converted to raw form
+        *scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
+            If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
+            string is given, a DirectoryConfigScope is created from it.
 
-    Returns:
-        Raw github/gitlab url or the original url
+    Examples:
+
+        >>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
+        >>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
+        >>> cfg = create_from(
+        ...     (ConfigScopePriority.COMMAND_LINE, cl_scope),
+        ...     (ConfigScopePriority.BUILTIN, builtin_scope)
+        ... )
    """
-    # Note we rely on GitHub to redirect the 'raw' URL returned here to the
-    # actual URL under https://raw.githubusercontent.com/ with '/blob'
-    # removed and or, '/blame' if needed.
-    if "github" in url or "gitlab" in url:
-        return url.replace("/blob/", "/raw/")
+    scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
+    result = Configuration()
+    for priority, scope in scopes_with_priority:
+        result.push_scope(scope, priority=priority)
+    return result
 
-    return url
-
-
-def collect_urls(base_url: str) -> list:
-    """Return a list of configuration URLs.
-
-    Arguments:
-        base_url: URL for a configuration (yaml) file or a directory
-            containing yaml file(s)
-
-    Returns:
-        List of configuration file(s) or empty list if none
-    """
-    if not base_url:
-        return []
-
-    extension = ".yaml"
-
-    if base_url.endswith(extension):
-        return [base_url]
-
-    # Collect configuration URLs if the base_url is a "directory".
-    _, links = web_util.spider(base_url, 0)
-    return [link for link in links if link.endswith(extension)]
-
-
-def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
-    """Retrieve configuration file(s) at the specified URL.
-
-    Arguments:
-        url: URL for a configuration (yaml) file or a directory containing
-            yaml file(s)
-        dest_dir: destination directory
-        skip_existing: Skip files that already exist in dest_dir if
-            ``True``; otherwise, replace those files
-
-    Returns:
-        Path to the corresponding file if URL is or contains a
-        single file and it is the only file in the destination directory or
-        the root (dest_dir) directory if multiple configuration files exist
-        or are retrieved.
-    """
-
-    def _fetch_file(url):
-        raw = raw_github_gitlab_url(url)
-        tty.debug(f"Reading config from url {raw}")
-        return web_util.fetch_url_text(raw, dest_dir=dest_dir)
-
-    if not url:
-        raise ConfigFileError("Cannot retrieve configuration without a URL")
-
-    # Return the local path to the cached configuration file OR to the
-    # directory containing the cached configuration files.
-    config_links = collect_urls(url)
-    existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
-
-    paths = []
-    for config_url in config_links:
-        basename = os.path.basename(config_url)
-        if skip_existing and basename in existing_files:
-            tty.warn(
-                f"Will not fetch configuration from {config_url} since a "
-                f"version already exists in {dest_dir}"
-            )
-            path = os.path.join(dest_dir, basename)
-        else:
-            path = _fetch_file(config_url)
-
-        if path:
-            paths.append(path)
-
-    if paths:
-        return dest_dir if len(paths) > 1 else paths[0]
-
-    raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
-
-
-def get_mark_from_yaml_data(obj):
-    """Try to get ``spack.util.spack_yaml`` mark from YAML data.
-
-    We try the object, and if that fails we try its first member (if it's a container).
-
-    Returns:
-        mark if one is found, otherwise None.
-    """
-    # mark of object itelf
-    mark = getattr(obj, "_start_mark", None)
-    if mark:
-        return mark
-
-    # mark of first member if it is a container
-    if isinstance(obj, (list, dict)):
-        first_member = next(iter(obj), None)
-        if first_member:
-            mark = getattr(first_member, "_start_mark", None)
-
-    return mark
 
 
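`_normalize_input` is what lets `create_from` accept three spellings of the same thing. Under the defaults above, these calls are equivalent for a scope that belongs at config-file priority (`/etc/spack` is a hypothetical existing directory; names are taken from this diff):

    scope = InternalConfigScope("extra", {"config": {"debug": True}})

    cfg1 = create_from((ConfigScopePriority.CONFIG_FILES, scope))  # explicit tuple
    cfg2 = create_from(scope)         # bare scope: CONFIG_FILES priority assumed
    cfg3 = create_from("/etc/spack")  # directory: wrapped in a DirectoryConfigScope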
 def determine_number_of_jobs(
@@ -1670,3 +1712,7 @@ def get_path(path, data):
 
    # give up and return None if nothing worked
    return None
+
+
+class RecursiveIncludeError(spack.error.SpackError):
+    """Too many levels of recursive includes."""
@@ -6,6 +6,8 @@
 """
 import warnings
 
+import jsonschema
+
 import spack.environment as ev
 import spack.schema.env as env
 import spack.util.spack_yaml as syaml
@@ -30,8 +32,6 @@ def validate(configuration_file):
    Returns:
        A sanitized copy of the configuration stored in the input file
    """
-    import jsonschema
-
    with open(configuration_file, encoding="utf-8") as f:
        config = syaml.load(f)
 
@@ -57,7 +57,7 @@ def validate(configuration_file):
    # Set the default value of the concretization strategy to unify and
    # warn if the user explicitly set another value
    env_dict.setdefault("concretizer", {"unify": True})
-    if not env_dict["concretizer"]["unify"] is True:
+    if env_dict["concretizer"]["unify"] is not True:
        warnings.warn(
            '"concretizer:unify" is not set to "true", which means the '
            "generated image may contain different variants of the same "