Compare commits

Comparing `backports/...` to `containers` — 704 commits.
.github/workflows/audit.yaml (4 changes)

```diff
@@ -28,7 +28,7 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
        python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        spack -d audit externals
        ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
```
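Most of the churn across these workflow files is the same one-line change: a pinned action bumped to a new full-length commit SHA. Pinning to a SHA rather than a tag is what makes such bumps necessary in the first place; a minimal sketch of the difference (the step comments and the `v4` tag are illustrative only):

```yaml
steps:
  # Tag pin: "v4" is mutable, so the action's code can change
  # without any edit to this workflow file.
  - uses: actions/checkout@v4
  # Commit pin: the action's code is frozen until a change like the
  # ones in this comparison explicitly bumps the SHA.
  - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
```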
.github/workflows/bootstrap.yml (42 changes)

```diff
@@ -37,7 +37,7 @@ jobs:
         make patch unzip which xz python3 python3-devel tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap clingo
@@ -53,27 +53,31 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
     steps:
     - name: Setup macOS
-      if: ${{ matrix.runner != 'ubuntu-latest' }}
+      if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
       run: |
         brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: "3.12"
     - name: Bootstrap clingo
+      env:
+        SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+        SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+        USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
       run: |
-        source share/spack/setup-env.sh
+        ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
         spack bootstrap disable github-actions-v0.5
         spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
-        tree ~/.spack/bootstrap/store/
+        tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -83,14 +87,14 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
-      run: brew install tree gawk
-    - name: Remove system executables
       run: |
-        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-          sudo rm $(command -v gpg gpg2 patchelf)
-        done
+        brew install tree gawk
+        sudo rm -rf $(command -v gpg gpg2)
+    - name: Setup Ubuntu
+      if: ${{ matrix.runner == 'ubuntu-latest' }}
+      run: sudo rm -rf $(command -v gpg gpg2 patchelf)
     - name: Checkout
-      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap GnuPG
@@ -110,14 +114,16 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
-      run: brew install tree
-    - name: Remove system executables
       run: |
-        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-          sudo rm $(command -v gpg gpg2 patchelf)
-        done
+        brew install tree
+        # Remove GnuPG since we want to bootstrap it
+        sudo rm -rf /usr/local/bin/gpg
+    - name: Setup Ubuntu
+      if: ${{ matrix.runner == 'ubuntu-latest' }}
+      run: |
+        sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
     - name: Checkout
-      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
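The `windows-latest` support added above leans on a GitHub Actions expression idiom: `condition && value-if-true || value-if-false` emulates a ternary operator, provided the "true" value is itself truthy (non-empty). A reduced sketch of the pattern in isolation — the job and step names are placeholders:

```yaml
jobs:
  demo:
    strategy:
      matrix:
        runner: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.runner }}
    steps:
      - name: Show the selected extension
        env:
          # Evaluates to 'ps1' on the Windows runner and 'sh' elsewhere.
          SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
        shell: bash
        run: echo "setup script extension: $SCRIPT_EXT"
```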
.github/workflows/build-containers.yml (11 changes)

```diff
@@ -40,8 +40,7 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
-                     [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
+                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
                      [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                      [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                      [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -56,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
@@ -94,10 +93,10 @@ jobs:
           path: dockerfiles

     - name: Set up QEMU
-      uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
+      uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee

     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
+      uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4

     - name: Log in to GitHub Container Registry
       uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -114,7 +113,7 @@ jobs:
         password: ${{ secrets.DOCKERHUB_TOKEN }}

     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
+      uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
```
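The `dockerfile` matrix above packs three related values into one entry, and the steps unpack them by position (`matrix.dockerfile[0]`, `[1]`, `[2]`). A reduced sketch of the same pattern, trimmed to a single entry:

```yaml
strategy:
  matrix:
    # Each entry is [name, platforms, base image]; bundling them in one
    # list entry keeps the three values in sync within each matrix job.
    dockerfile: [[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04']]
steps:
  - run: echo "Building ${{ matrix.dockerfile[0] }} from ${{ matrix.dockerfile[2] }}"
```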
|||||||
9
.github/workflows/ci.yaml
vendored
9
.github/workflows/ci.yaml
vendored
@@ -36,7 +36,7 @@ jobs:
|
|||||||
core: ${{ steps.filter.outputs.core }}
|
core: ${{ steps.filter.outputs.core }}
|
||||||
packages: ${{ steps.filter.outputs.packages }}
|
packages: ${{ steps.filter.outputs.packages }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||||
if: ${{ github.event_name == 'push' }}
|
if: ${{ github.event_name == 'push' }}
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -53,6 +53,13 @@ jobs:
|
|||||||
- 'var/spack/repos/builtin/packages/clingo/**'
|
- 'var/spack/repos/builtin/packages/clingo/**'
|
||||||
- 'var/spack/repos/builtin/packages/python/**'
|
- 'var/spack/repos/builtin/packages/python/**'
|
||||||
- 'var/spack/repos/builtin/packages/re2c/**'
|
- 'var/spack/repos/builtin/packages/re2c/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/gnupg/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/libassuan/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/libgcrypt/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/libgpg-error/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/libksba/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/npth/**'
|
||||||
|
- 'var/spack/repos/builtin/packages/pinentry/**'
|
||||||
- 'lib/spack/**'
|
- 'lib/spack/**'
|
||||||
- 'share/spack/**'
|
- 'share/spack/**'
|
||||||
- '.github/workflows/bootstrap.yml'
|
- '.github/workflows/bootstrap.yml'
|
||||||
|
|||||||
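The new GnuPG-related package paths feed a change-detection job whose outputs (`steps.filter.outputs.*`) gate the downstream jobs. The filter step itself sits outside this hunk; the sketch below assumes a `dorny/paths-filter`-style action and an illustrative `bootstrap` output name:

```yaml
jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      bootstrap: ${{ steps.filter.outputs.bootstrap }}
    steps:
      - uses: dorny/paths-filter@v3
        id: filter
        with:
          filters: |
            bootstrap:
              - 'var/spack/repos/builtin/packages/gnupg/**'
              - 'lib/spack/**'
  bootstrap-tests:
    needs: changes
    # Runs only when one of the filtered paths actually changed.
    if: ${{ needs.changes.outputs.bootstrap == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "bootstrap-related files changed"
```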
.github/workflows/nightly-win-builds.yml (2 changes)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
.github/workflows/style/requirements.txt (4 changes)

```diff
@@ -1,7 +1,7 @@
 black==24.4.2
 clingo==5.7.1
-flake8==7.0.0
+flake8==7.1.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.9
+types-six==1.16.21.20240513
 vermin==1.6.0
```
.github/workflows/unit_tests.yaml (38 changes)

```diff
@@ -14,14 +14,14 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-22.04]
+        os: [ubuntu-latest]
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
         concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
         - python-version: '3.11'
-          os: ubuntu-20.04
+          os: ubuntu-latest
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         - python-version: '3.6'
@@ -30,28 +30,28 @@ jobs:
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.8'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.9'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.10'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.11'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false

     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,16 +91,16 @@ jobs:
         UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
         token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
   # Test shell integration
   shell:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
         flags: shelltests,linux
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - name: Setup repo and non-root user
       run: |
         git --version
@@ -158,9 +158,9 @@ jobs:
         spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
   # Test for the clingo based solver (using clingo-cffi)
   clingo-cffi:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
         SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
         flags: unittests,linux,clingo
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -198,7 +198,7 @@ jobs:
         os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
         flags: unittests,macos
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -254,7 +254,7 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
         flags: unittests,windows
         token: ${{ secrets.CODECOV_TOKEN }}
```
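Several hunks above do nothing but move the `os:` key inside `exclude` entries. That matters because an exclude entry drops a combination only when *every* key it lists matches the generated matrix, so the `os:` values have to track the matrix's switch to `ubuntu-latest`. A reduced sketch of the semantics:

```yaml
strategy:
  matrix:
    os: [ubuntu-latest]
    python-version: ['3.7', '3.11']
    exclude:
      # Removes only the (ubuntu-latest, 3.7) combination. If os: still
      # said ubuntu-22.04 here, no combination would match and nothing
      # would be excluded.
      - python-version: '3.7'
        os: ubuntu-latest
```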
.github/workflows/valid-style.yml (6 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
    - name: Setup repo and non-root user
      run: |
        git --version
```
```diff
@@ -14,26 +14,3 @@ sphinx:
 python:
   install:
   - requirements: lib/spack/docs/requirements.txt
-
-search:
-  ranking:
-    spack.html: -10
-    spack.*.html: -10
-    llnl.html: -10
-    llnl.*.html: -10
-    _modules/*: -10
-    command_index.html: -9
-    basic_usage.html: 5
-    configuration.html: 5
-    config_yaml.html: 5
-    packages_yaml.html: 5
-    build_settings.html: 5
-    environments.html: 5
-    containers.html: 5
-    mirrors.html: 5
-    module_file_support.html: 5
-    repositories.html: 5
-    binary_caches.html: 5
-    chain.html: 5
-    pipelines.html: 5
-    packaging_guide.html: 5
```
|||||||
102
CHANGELOG.md
102
CHANGELOG.md
@@ -1,102 +1,3 @@
|
|||||||
# v0.22.5 (2025-02-21)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
- Continue to mark non-roots as implicitly installed on partial env installs (#47183)
|
|
||||||
|
|
||||||
## Notes
|
|
||||||
- v0.22.4 is skipped to do an error in the release process
|
|
||||||
|
|
||||||
# v0.22.3 (2024-11-18)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
- Forward compatibility with Python 3.13 (#46775, #46983, #47035, #47175)
|
|
||||||
- `archspec` was updated to v0.2.5 (#46503, #46958)
|
|
||||||
- Fix path to Spack in `spack env depfile` makefile (#46966)
|
|
||||||
- Fix `glibc` detection in Chinese locales (#47434)
|
|
||||||
- Fix pickle round-trip of specs propagating variants (#47351)
|
|
||||||
- Fix a bug where concurrent spack install commands would not always update explicits correctly
|
|
||||||
(#47358)
|
|
||||||
- Fix a bug where autopush would run before all post install hooks modifying the install prefix
|
|
||||||
had run (#47329)
|
|
||||||
- Fix `spack find -u` (#47102)
|
|
||||||
- Fix a bug where sometimes the wrong Python interpreter was used for build dependencies such as
|
|
||||||
`py-setuptools` (#46980)
|
|
||||||
- Fix default config errors found by `spack audit externals` (#47308)
|
|
||||||
- Fix duplicate printing of external roots in installer (#44917)
|
|
||||||
- Fix modules schema in `compilers.yaml` (#47197)
|
|
||||||
- Reduce the size of generated YAML for Gitlab CI (#44995)
|
|
||||||
- Handle missing metadata file gracefully in bootstrap (#47278)
|
|
||||||
- Show underlying errors on fetch failure (#45714)
|
|
||||||
- Recognize `.` and `..` as paths instead of names in buildcache commands (#47105)
|
|
||||||
- Documentation and style (#46991, #47107, #47110, #47111, #47346, #47307, #47309, #47328, #47160,
|
|
||||||
#47402, #47557, #46709, #47080)
|
|
||||||
- Tests and CI fixes (#47165, #46711)
|
|
||||||
|
|
||||||
## Package updates
|
|
||||||
- ffmpeg: fix hash of patch (#45574)
|
|
||||||
|
|
||||||
# v0.22.2 (2024-09-21)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
|
|
||||||
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
|
|
||||||
- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
|
|
||||||
- Support macOS Sequoia (#45018, #45127)
|
|
||||||
- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
|
|
||||||
- Ensure shell escaping of environment variable values in load and activate commands (#42780)
|
|
||||||
- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
|
|
||||||
- Do not halt concretization on unknown variants in externals (#45326)
|
|
||||||
- Improve validation of `develop` config section (#46485)
|
|
||||||
- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
|
|
||||||
- Improve backwards compatibility in `include_concrete` (#45766)
|
|
||||||
- Fix issue where package tags were sometimes repeated (#45160)
|
|
||||||
- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
|
|
||||||
- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
|
|
||||||
- Remove debug statements in package hash computation (#45235)
|
|
||||||
- Remove redundant clingo warnings (#45269)
|
|
||||||
- Remove hard-coded layout version (#45645)
|
|
||||||
- Do not initialize previous store state in `use_store` (#45268)
|
|
||||||
- Docs improvements (#46475)
|
|
||||||
|
|
||||||
## Package updates
|
|
||||||
- `chapel` major update (#42197, #44931, #45304)
|
|
||||||
|
|
||||||
# v0.22.1 (2024-07-04)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
- Fix reuse of externals on Linux (#44316)
|
|
||||||
- Ensure parent gcc-runtime version >= child (#44834, #44870)
|
|
||||||
- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
|
|
||||||
- Improve version detection of glibc (#44154)
|
|
||||||
- Improve heuristics for solver (#44893, #44976, #45023)
|
|
||||||
- Make strong preferences override reuse (#44373)
|
|
||||||
- Reduce verbosity when C compiler is missing (#44182)
|
|
||||||
- Make missing ccache executable an error when required (#44740)
|
|
||||||
- Make every environment view containing `python` a `venv` (#44382)
|
|
||||||
- Fix external detection for compilers with os but no target (#44156)
|
|
||||||
- Fix version optimization for roots (#44272)
|
|
||||||
- Handle common implementations of pagination of tags in OCI build caches (#43136)
|
|
||||||
- Apply fetched patches to develop specs (#44950)
|
|
||||||
- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
|
|
||||||
- Fix issue with long filenames in build caches on Windows (#43851)
|
|
||||||
- Fix formatting issue in `spack audit` (#45045)
|
|
||||||
- CI fixes (#44582, #43965, #43967, #44279, #44213)
|
|
||||||
|
|
||||||
## Package updates
|
|
||||||
- protobuf: fix 3.4:3.21 patch checksum (#44443)
|
|
||||||
- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
|
|
||||||
- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
|
|
||||||
- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
|
|
||||||
- Remove mesa18 and libosmesa (#44264)
|
|
||||||
- Enforce consistency of `gl` providers (#44307)
|
|
||||||
- Require libiconv for iconv (#44335, #45026).
|
|
||||||
Notice that glibc/musl also provide iconv, but are not guaranteed to be
|
|
||||||
complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
|
|
||||||
- py-matplotlib: qualify when to do a post install (#44191)
|
|
||||||
- rust: fix v1.78.0 instructions (#44127)
|
|
||||||
- suite-sparse: improve setting of the `libs` property (#44214)
|
|
||||||
- netlib-lapack: provide blas and lapack together (#44981)
|
|
||||||
|
|
||||||
|
|
||||||
# v0.22.0 (2024-05-12)
|
# v0.22.0 (2024-05-12)
|
||||||
|
|
||||||
@@ -418,7 +319,6 @@
|
|||||||
* 344 committers to packages
|
* 344 committers to packages
|
||||||
* 45 committers to core
|
* 45 committers to core
|
||||||
|
|
||||||
|
|
||||||
# v0.21.2 (2024-03-01)
|
# v0.21.2 (2024-03-01)
|
||||||
|
|
||||||
## Bugfixes
|
## Bugfixes
|
||||||
@@ -448,7 +348,7 @@
|
|||||||
- spack graph: fix coloring with environments (#41240)
|
- spack graph: fix coloring with environments (#41240)
|
||||||
- spack info: sort variants in --variants-by-name (#41389)
|
- spack info: sort variants in --variants-by-name (#41389)
|
||||||
- Spec.format: error on old style format strings (#41934)
|
- Spec.format: error on old style format strings (#41934)
|
||||||
- ASP-based solver:
|
- ASP-based solver:
|
||||||
- fix infinite recursion when computing concretization errors (#41061)
|
- fix infinite recursion when computing concretization errors (#41061)
|
||||||
- don't error for type mismatch on preferences (#41138)
|
- don't error for type mismatch on preferences (#41138)
|
||||||
- don't emit spurious debug output (#41218)
|
- don't emit spurious debug output (#41218)
|
||||||
|
|||||||
```diff
@@ -32,7 +32,7 @@

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, and many supercomputers. Spack is non-destructive: installing a
+macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
```
```diff
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec /usr/bin/env spack python "$@"
+exec spack python "$@"
```
```diff
@@ -188,25 +188,27 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-:: If args contain --sh, --csh, or -h/--help: just execute.
-if defined _sp_args (
-  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-    goto :default_case
-  )
+if NOT defined _sp_args (
+  exit /B 0
+)
+
+:: If args contain --bat, or -h/--help: just execute.
+if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
+  goto :default_case
 )

 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
+) do %%I

 goto :end_switch

-:case_unload
-goto :case_load
-
 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
```
```diff
@@ -42,8 +42,8 @@ concretizer:
     # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Option to specify compatiblity between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
```
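The `os_compatible` comment describes a directional relation; a sketch of what a populated entry would look like, using the values from the comment's own example:

```yaml
concretizer:
  # Packages and compilers built on monterey may be reused on sonoma;
  # add the mirror entry (monterey: [sonoma]) only if reuse should go
  # both ways.
  os_compatible:
    sonoma: [monterey]
```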
```diff
@@ -1,16 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default configuration for Spack's module file generation.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/modules.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/modules.yaml
-# -------------------------------------------------------------------------
-modules: {}
```
```diff
@@ -37,9 +37,9 @@ packages:
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
-    libgfortran: [gcc-runtime]
+    libgfortran: [ gcc-runtime ]
     libglx: [mesa+glx]
-    libifcore: [intel-oneapi-runtime]
+    libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
```
```diff
@@ -1433,22 +1433,12 @@ the reserved keywords ``platform``, ``os`` and ``target``:
    $ spack install libelf os=ubuntu18.04
    $ spack install libelf target=broadwell

-or together by using the reserved keyword ``arch``:
-
-.. code-block:: console
-
-   $ spack install libelf arch=cray-CNL10-haswell
-
 Normally users don't have to bother specifying the architecture if they
 are installing software for their current host, as in that case the
 values will be detected automatically. If you need fine-grained control
 over which packages use which targets (or over *all* packages' default
 target), see :ref:`package-preferences`.

-.. admonition:: Cray machines
-
-   The situation is a little bit different for Cray machines and a detailed
-   explanation on how the architecture can be set on them can be found at :ref:`cray-support`
-
 .. _support-for-microarchitectures:
```
```diff
@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
    def autoreconf(self, spec, prefix):
        which("bash")("autogen.sh")

+If the ``package.py`` has build instructions in a separate
+:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+
+.. code-block:: python
+
+   class AutotoolsBuilder(AutotoolsBuilder):
+       def autoreconf(self, pkg, spec, prefix):
+           which("bash")("autogen.sh")
+
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
```
```diff
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::

     spack list -d oneAPI
```
```diff
@@ -5,13 +5,14 @@

 .. chain:

-=============================================
-Chaining Spack Installations (upstreams.yaml)
-=============================================
+============================
+Chaining Spack Installations
+============================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml``:
+you can add it as an entry to ``upstreams.yaml`` at any of the
+:ref:`configuration-scopes`:

 .. code-block:: yaml
@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:
       install_tree: /path/to/another/spack/opt/spack

 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory.
+Spack base directory, or the location of the ``install_tree`` defined
+in :ref:`config.yaml <config-yaml>`.

 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
```
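For reference, a complete minimal `upstreams.yaml` of the kind this hunk documents; the instance name and path are placeholders:

```yaml
upstreams:
  upstream-spack-instance:
    # Must be the opt/spack directory (or the configured install_tree)
    # of the other Spack installation.
    install_tree: /path/to/another/spack/opt/spack
```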
@@ -203,12 +203,9 @@ The OS that are currently supported are summarized in the table below:
|
|||||||
* - Ubuntu 24.04
|
* - Ubuntu 24.04
|
||||||
- ``ubuntu:24.04``
|
- ``ubuntu:24.04``
|
||||||
- ``spack/ubuntu-noble``
|
- ``spack/ubuntu-noble``
|
||||||
* - CentOS 7
|
* - CentOS Stream9
|
||||||
- ``centos:7``
|
- ``quay.io/centos/centos:stream9``
|
||||||
- ``spack/centos7``
|
- ``spack/centos-stream9``
|
||||||
* - CentOS Stream
|
|
||||||
- ``quay.io/centos/centos:stream``
|
|
||||||
- ``spack/centos-stream``
|
|
||||||
* - openSUSE Leap
|
* - openSUSE Leap
|
||||||
- ``opensuse/leap``
|
- ``opensuse/leap``
|
||||||
- ``spack/leap15``
|
- ``spack/leap15``
|
||||||
@@ -273,7 +270,7 @@ under the ``container`` attribute of environments:
|
|||||||
# Sets the base images for the stages where Spack builds the
|
# Sets the base images for the stages where Spack builds the
|
||||||
# software or where the software gets installed after being built..
|
# software or where the software gets installed after being built..
|
||||||
images:
|
images:
|
||||||
os: "centos:7"
|
os: "almalinux:9"
|
||||||
spack: develop
|
spack: develop
|
||||||
|
|
||||||
# Whether or not to strip binaries
|
# Whether or not to strip binaries
|
||||||
@@ -324,32 +321,33 @@ following ``spack.yaml``:

    container:
      images:
-       os: centos:7
-       spack: 0.15.4
+       os: almalinux:9
+       spack: 0.22.0

-uses ``spack/centos7:0.15.4`` and ``centos:7`` for the stages where the
+uses ``spack/almalinux9:0.22.0`` and ``almalinux:9`` for the stages where the
 software is respectively built and installed:

 .. code-block:: docker

    # Build stage with Spack pre-installed and ready to be used
-   FROM spack/centos7:0.15.4 as builder
+   FROM spack/almalinux9:0.22.0 AS builder

    # What we want to install and how we want to install it
    # is specified in a manifest file (spack.yaml)
-   RUN mkdir /opt/spack-environment \
-   &&  (echo "spack:" \
-   &&   echo "  specs:" \
-   &&   echo "  - gromacs+mpi" \
-   &&   echo "  - mpich" \
-   &&   echo "  concretizer:" \
-   &&   echo "    unify: true" \
-   &&   echo "  config:" \
-   &&   echo "    install_tree: /opt/software" \
-   &&   echo "    view: /opt/view") > /opt/spack-environment/spack.yaml
+   RUN mkdir -p /opt/spack-environment && \
+   set -o noclobber \
+   &&  (echo spack: \
+   &&   echo '  specs:' \
+   &&   echo '  - gromacs+mpi' \
+   &&   echo '  - mpich' \
+   &&   echo '  concretizer:' \
+   &&   echo '    unify: true' \
+   &&   echo '  config:' \
+   &&   echo '    install_tree: /opt/software' \
+   &&   echo '    view: /opt/views/view') > /opt/spack-environment/spack.yaml

 [ ... ]
 # Bare OS image to run the installed executables
-FROM centos:7
+FROM quay.io/almalinuxorg/almalinux:9

 COPY --from=builder /opt/spack-environment /opt/spack-environment
 COPY --from=builder /opt/software /opt/software
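As a usage note for the hunk above: a Dockerfile like this is not written by hand, it is generated from the environment's ``spack.yaml``. A minimal sketch, assuming you are inside the environment directory (the image tag is a hypothetical example):

.. code-block:: console

   $ cd /path/to/environment            # directory containing spack.yaml
   $ spack containerize > Dockerfile
   $ docker build -t my-gromacs-image .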
@@ -184,7 +184,7 @@ Style Tests

 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -716,27 +716,27 @@ Release branches
 ^^^^^^^^^^^^^^^^

 There are currently two types of Spack releases: :ref:`major releases
-<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
-<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
+<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
+<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
 diagram of how Spack release branches work::

-   o    branch: develop  (latest version, v0.23.0.dev0)
+   o    branch: develop  (latest version, v0.19.0.dev0)
    |
    o
-   | o  branch: releases/v0.22, tag: v0.22.1
+   | o  branch: releases/v0.18, tag: v0.18.1
    o |
-   | o  tag: v0.22.0
+   | o  tag: v0.18.0
    o |
    | o
    |/
    o
    |
    o
-   | o  branch: releases/v0.21, tag: v0.21.2
+   | o  branch: releases/v0.17, tag: v0.17.2
    o |
-   | o  tag: v0.21.1
+   | o  tag: v0.17.1
    o |
-   | o  tag: v0.21.0
+   | o  tag: v0.17.0
    o |
    | o
    |/
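If you want to reproduce the diagram above on a local clone, plain git can draw the same topology; a small sketch, assuming the release branches from the diagram exist locally:

.. code-block:: console

   $ git log --oneline --graph --decorate develop releases/v0.18 releases/v0.17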
@@ -747,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
 version is that of the next **major** release with a ``.dev0`` suffix.

 Each Spack release series also has a corresponding branch, e.g.
-``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
-``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
+``releases/v0.18`` has ``0.18.x`` versions of Spack, and
+``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
 tagged version on a release branch. Minor releases are back-ported from
 develop onto release branches. This is typically done by cherry-picking
 bugfix commits off of ``develop``.
@@ -778,40 +778,27 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-We schedule work for **major releases** through `milestones
-<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
-<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
-<https://github.com/spack/spack/labels>`_.
-
-There is only one milestone open at a time. Its name corresponds to the next major version, for
-example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
-core developers, so that they are not forgotten at the time of release. The milestone is closed
-when the release is made, and a new milestone is created for the next major release.
-
-Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
-assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
-issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
-Important issues should be assigned to the next milestone as well, so they appear at the top of
-the project board.
-
-Spack's milestones are not firm commitments so we move work between releases frequently. If we
-need to make a release and some tasks are not yet done, we will simply move them to the next major
-release milestone, rather than delaying the release to complete them.
-
-^^^^^^^^^^^^^^^^^^^^^
-Backporting bug fixes
-^^^^^^^^^^^^^^^^^^^^^
-
-When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
-(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
-backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
-fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
-that the bug fix should be backported to.
-
-Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
-This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
-branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
-Typically there are one or two backport pull requests open at any given time.
+We schedule work for releases by creating `GitHub projects
+<https://github.com/spack/spack/projects>`_. At any time, there may be
+several open release projects. For example, below are two releases (from
+some past version of the page linked above):
+
+.. image:: images/projects.png
+
+This image shows one release in progress for ``0.15.1`` and another for
+``0.16.0``. Each of these releases has a project board containing issues
+and pull requests. GitHub shows a status bar with completed work in
+green, work in progress in purple, and work not started yet in gray, so
+it's fairly easy to see progress.
+
+Spack's project boards are not firm commitments so we move work between
+releases frequently. If we need to make a release and some tasks are not
+yet done, we will simply move them to the next minor or major release, rather
+than delaying the release to complete them.
+
+For more on using GitHub project boards, see `GitHub's documentation
+<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.

 .. _major-releases:
@@ -819,21 +806,25 @@ Typically there are one or two backport pull requests open at any given time.
 Making major releases
 ^^^^^^^^^^^^^^^^^^^^^

-Assuming all required work from the milestone is completed, the steps to make the major release
-are:
+Assuming a project board has already been created and all required work
+completed, the steps to make the major release are:

-#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
-   release.
+#. Create two new project boards:

-#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
+   * One for the next major release
+   * One for the next point release

-#. Move any optional tasks that are not done to the next milestone.
+#. Move any optional tasks that are not done to one of the new project boards.
+
+   In general, small bugfixes should go to the next point release. Major
+   features, refactors, and changes that could affect concretization should
+   go in the next major release.

 #. Create a branch for the release, based on ``develop``:

    .. code-block:: console

-      $ git checkout -b releases/v0.23 develop
+      $ git checkout -b releases/v0.15 develop

    For a version ``vX.Y.Z``, the branch's name should be
    ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -869,8 +860,8 @@ are:

    Create a pull request targeting the ``develop`` branch, bumping the major
    version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.23.0``, set the version
-   to ``(0, 24, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.15.0``, set the version
+   to ``(0, 16, 0, 'dev0')`` on ``develop``.

 #. Follow the steps in :ref:`publishing-releases`.
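For context, the version bump described in the step above is a one-line edit in ``lib/spack/spack/__init__.py``; a minimal sketch (the variable names match Spack's source, but treat the snippet as illustrative):

.. code-block:: python

   #: (major, minor, micro, dev release) tuple
   spack_version_info = (0, 16, 0, "dev0")
   #: String containing Spack version joined with .'s
   spack_version = ".".join(str(s) for s in spack_version_info)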
@@ -879,52 +870,82 @@ are:
 #. Follow the steps in :ref:`announcing-releases`.


-.. _patch-releases:
+.. _point-releases:

 ^^^^^^^^^^^^^^^^^^^^^
-Making patch releases
+Making point releases
 ^^^^^^^^^^^^^^^^^^^^^

-To make the patch release process both efficient and transparent, we use a *backports pull request*
-which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
-cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
-this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
-changes are fresh in the mind of the developer.
+Assuming a project board has already been created and all required work
+completed, the steps to make the point release are:

-The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
-is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
+#. Create a new project board for the next point release.

-Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
-on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
-squashed), cherry-pick each associated commit individually. Never force push to the
-``backports/vX.Y.Z`` branch.
+#. Move any optional tasks that are not done to the next project board.

-.. warning::
+#. Check out the release branch (it should already exist).

-   Sometimes you may **still** get merge conflicts even if you have
-   cherry-picked all the commits in order. This generally means there
-   is some other intervening pull request that the one you're trying
-   to pick depends on. In these cases, you'll need to make a judgment
-   call regarding those pull requests. Consider the number of affected
-   files and/or the resulting differences.
+   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
+   For ``v0.15.1``, you would check out ``releases/v0.15``:

-   1. If the changes are small, you might just cherry-pick it.
+   .. code-block:: console

-   2. If the changes are large, then you may decide that this fix is not
-      worth including in a patch release, in which case you should remove
-      the label from the pull request. Remember that large, manual backports
-      are seldom the right choice for a patch release.
+      $ git checkout releases/v0.15

-When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
-release as follows:
+#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
+   in the project, create it. This pull request ought to be created as early as
+   possible when working on a release project, so that we can build the release
+   commits incrementally, and identify potential conflicts at an early stage.

-#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
-   release.
+#. Cherry-pick each pull request in the ``Done`` column of the release
+   project board onto the ``Backports vX.Y.Z`` pull request.

-#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
+   This is **usually** fairly simple since we squash the commits from the
+   vast majority of pull requests. That means there is only one commit
+   per pull request to cherry-pick. For example, `this pull request
+   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
+   they were squashed into a single commit on merge. You can see the
+   commit that was created here:

-#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
-   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:
+   .. image:: images/pr-commit.png

+   You can easily cherry pick it like this (assuming you already have the
+   release branch checked out):
+
+   .. code-block:: console
+
+      $ git cherry-pick 7e46da7
+
+   For pull requests that were rebased (or not squashed), you'll need to
+   cherry-pick each associated commit individually.
+
+   .. warning::
+
+      It is important to cherry-pick commits in the order they happened,
+      otherwise you can get conflicts while cherry-picking. When
+      cherry-picking look at the merge date,
+      **not** the number of the pull request or the date it was opened.
+
+      Sometimes you may **still** get merge conflicts even if you have
+      cherry-picked all the commits in order. This generally means there
+      is some other intervening pull request that the one you're trying
+      to pick depends on. In these cases, you'll need to make a judgment
+      call regarding those pull requests. Consider the number of affected
+      files and or the resulting differences.
+
+      1. If the dependency changes are small, you might just cherry-pick it,
+         too. If you do this, add the task to the release board.
+
+      2. If the changes are large, then you may decide that this fix is not
+         worth including in a point release, in which case you should remove
+         the task from the release project.
+
+      3. You can always decide to manually back-port the fix to the release
+         branch if neither of the above options makes sense, but this can
+         require a lot of work. It's seldom the right choice.
+
+#. When all the commits from the project board are cherry-picked into
+   the ``Backports vX.Y.Z`` pull request, you can push a commit to:

    1. Bump the version in ``lib/spack/spack/__init__.py``.
    2. Update ``CHANGELOG.md`` with a list of the changes.
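A hedged console sketch of the cherry-pick loop described above, using the hypothetical ``v0.15.1`` release from the hunk (the commit hash is the one quoted there; ``-x`` is an optional git flag, not part of the documented workflow, that records the original commit id in the message):

.. code-block:: console

   $ git checkout releases/v0.15
   $ git cherry-pick -x 7e46da7     # repeat for each squashed commit, oldest first
   $ git push origin releases/v0.15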
@@ -933,22 +954,20 @@ release as follows:
    release branch. See `the changelog from 0.14.1
    <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

-#. Make sure CI passes on the **backports pull request**, including:
+#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+   is needed to keep track in the release branch of all the commits that were
+   cherry-picked.
+
+#. Make sure CI passes on the release branch, including:

    * Regular unit tests
    * Build tests
    * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

-#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
-   is needed to keep track in the release branch of all the commits that were
-   cherry-picked.
-
-#. Make sure CI passes on the last commit of the **release branch**.
-
-#. In the rare case you need to include additional commits in the patch release after the backports
-   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
-   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
-   repeat the process. Avoid repeated force pushes to the release branch.
+   If CI does not pass, you'll need to figure out why, and make changes
+   to the release branch until it does. You can make more commits, modify
+   or remove cherry-picked commits, or cherry-pick **more** from
+   ``develop`` to make this happen.

 #. Follow the steps in :ref:`publishing-releases`.
@@ -1023,31 +1042,25 @@ Updating `releases/latest`

 If the new release is the **highest** Spack release yet, you should
 also tag it as ``releases/latest``. For example, suppose the highest
-release is currently ``0.22.3``:
+release is currently ``0.15.3``:

-* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
-  it with ``releases/latest``, as these are higher than ``0.22.3``.
+* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
+  it with ``releases/latest``, as these are higher than ``0.15.3``.

 * If you are making a new release of an **older** major version of
-  Spack, e.g. ``0.21.4``, then you should not tag it as
+  Spack, e.g. ``0.14.4``, then you should not tag it as
   ``releases/latest`` (as there are newer major versions).

-To do so, first fetch the latest tag created on GitHub, since you may not have it locally:
+To tag ``releases/latest``, do this:

 .. code-block:: console

-   $ git fetch --force git@github.com:spack/spack vX.Y.Z
+   $ git checkout releases/vX.Y    # vX.Y is the new release's branch
+   $ git tag --force releases/latest
+   $ git push --force --tags

-Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
-
-.. code-block:: console
-
-   $ git tag --force releases/latest vX.Y.Z
-   $ git push --force git@github.com:spack/spack releases/latest
-
-The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
-tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
-local tags.
+The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
+``releases/latest`` tag with the new one.

 .. _announcing-releases:
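To double-check where the moved tag ended up after either variant above, you can query GitHub directly; a small sketch:

.. code-block:: console

   $ git ls-remote git@github.com:spack/spack releases/latest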
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
       .. code-block:: console

          apt update
-         apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
+         apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip

    .. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

          dnf install epel-release
          dnf group install "Development Tools"
-         dnf install gcc-gfortran redhat-lsb-core python3 unzip
+         dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

    .. tab-item:: macOS Brew

       .. code-block:: console

          brew update
-         brew install gcc git zip
+         brew install curl gcc git gnupg zip

 ------------
 Installation
@@ -1364,187 +1364,6 @@ This will write the private key to the file `dinosaur.priv`.
 or for help on an issue or the Spack slack.


-.. _cray-support:
-
--------------
-Spack on Cray
--------------
-
-Spack differs slightly when used on a Cray system. The architecture spec
-can differentiate between the front-end and back-end processor and operating system.
-For example, on Edison at NERSC, the back-end target processor
-is "Ivy Bridge", so you can specify to use the back-end this way:
-
-.. code-block:: console
-
-   $ spack install zlib target=ivybridge
-
-You can also use the operating system to build against the back-end:
-
-.. code-block:: console
-
-   $ spack install zlib os=CNL10
-
-Notice that the name includes both the operating system name and the major
-version number concatenated together.
-
-Alternatively, if you want to build something for the front-end,
-you can specify the front-end target processor. The processor for a login node
-on Edison is "Sandy bridge" so we specify on the command line like so:
-
-.. code-block:: console
-
-   $ spack install zlib target=sandybridge
-
-And the front-end operating system is:
-
-.. code-block:: console
-
-   $ spack install zlib os=SuSE11
-
-^^^^^^^^^^^^^^^^^^^^^^^
-Cray compiler detection
-^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can detect compilers using two methods. For the front-end, we treat
-everything the same. The difference lies in back-end compiler detection.
-Back-end compiler detection is made via the Tcl module avail command.
-Once it detects the compiler it writes the appropriate PrgEnv and compiler
-module name to compilers.yaml and sets the paths to each compiler with Cray's
-compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
-the correct PrgEnv and compiler module and will call appropriate wrapper.
-
-The compilers.yaml config file will also differ. There is a
-modules section that is filled with the compiler's Programming Environment
-and module name. On other systems, this field is empty []:
-
-.. code-block:: yaml
-
-   - compiler:
-       modules:
-       - PrgEnv-intel
-       - intel/15.0.109
-
-As mentioned earlier, the compiler paths will look different on a Cray system.
-Since most compilers are invoked using cc, CC and ftn, the paths for each
-compiler are replaced with their respective Cray compiler wrapper names:
-
-.. code-block:: yaml
-
-   paths:
-     cc: cc
-     cxx: CC
-     f77: ftn
-     fc: ftn
-
-As opposed to an explicit path to the compiler executable. This allows Spack
-to call the Cray compiler wrappers during build time.
-
-For more on compiler configuration, check out :ref:`compiler-config`.
-
-Spack sets the default Cray link type to dynamic, to better match other
-other platforms. Individual packages can enable static linking (which is the
-default outside of Spack on cray systems) using the ``-static`` flag.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting defaults and using Cray modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you want to use default compilers for each PrgEnv and also be able
-to load cray external modules, you will need to set up a ``packages.yaml``.
-
-Here's an example of an external configuration for cray modules:
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-     all:
-       providers:
-         mpi: [mpich]
-
-This tells Spack that for whatever package that depends on mpi, load the
-cray-mpich module into the environment. You can then be able to use whatever
-environment variables, libraries, etc, that are brought into the environment
-via module load.
-
-.. note::
-
-   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
-   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
-   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
-   compiler wrapper link line).
-
-You can set the default compiler that Spack can use for each compiler type.
-If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
-In the compiler field, set the compiler specs in your order of preference.
-Whenever you build with that compiler type, Spack will concretize to that version.
-
-Here is an example of a full packages.yaml used at NERSC
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
-         modules:
-         - cray-mpich
-       buildable: False
-     netcdf:
-       externals:
-       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       buildable: False
-     hdf5:
-       externals:
-       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       buildable: False
-     all:
-       compiler: [gcc@5.2.0, intel@16.0.0.109]
-       providers:
-         mpi: [mpich]
-
-Here we tell spack that whenever we want to build with gcc use version 5.2.0 or
-if we want to build with intel compilers, use version 16.0.0.109. We add a spec
-for each compiler type for each cray modules. This ensures that for each
-compiler on our system we can use that external module.
-
-For more on external packages check out the section :ref:`sec-external-packages`.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Using Linux containers on Cray machines
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack uses environment variables particular to the Cray programming
-environment to determine which systems are Cray platforms. These
-environment variables may be propagated into containers that are not
-using the Cray programming environment.
-
-To ensure that Spack does not autodetect the Cray programming
-environment, unset the environment variable ``MODULEPATH``. This
-will cause Spack to treat a linux container on a Cray system as a base
-linux distro.

 .. _windows_support:

 ----------------
BIN: lib/spack/docs/images/pr-commit.png (new file, 44 KiB; binary file not shown)
BIN: lib/spack/docs/images/projects.png (new file, 68 KiB; binary file not shown)
@@ -12,6 +12,10 @@
 Spack
 ===================

+.. epigraph::
+
+   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
+
 Spack is a package management tool designed to support multiple
 versions and configurations of software on a wide variety of platforms
 and environments. It was designed for large supercomputing centers,
File diff suppressed because it is too large.
@@ -253,17 +253,6 @@ can easily happen if it is not updated frequently, this behavior ensures that
 spack has a way to know for certain about the status of any concrete spec on
 the remote mirror, but can slow down pipeline generation significantly.

-The ``--optimize`` argument is experimental and runs the generated pipeline
-document through a series of optimization passes designed to reduce the size
-of the generated file.
-
-The ``--dependencies`` is also experimental and disables what in Gitlab is
-referred to as DAG scheduling, internally using the ``dependencies`` keyword
-rather than ``needs`` to list dependency jobs. The drawback of using this option
-is that before any job can begin, all jobs in previous stages must first
-complete. The benefit is that Gitlab allows more dependencies to be listed
-when using ``dependencies`` instead of ``needs``.
-
 The optional ``--output-file`` argument should be an absolute path (including
 file name) to the generated pipeline, and if not given, the default is
 ``./.gitlab-ci.yml``.
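For reference, the ``--output-file`` behavior kept by this hunk can be exercised like the sketch below; the output path is an arbitrary example:

.. code-block:: console

   $ spack ci generate --output-file /tmp/pipeline.yml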
@@ -476,9 +476,3 @@ implemented using Python's built-in `sys.path
 :py:mod:`spack.repo` module implements a custom `Python importer
 <https://docs.python.org/2/library/imp.html>`_.

-.. warning::
-
-   The mechanism for extending packages is not yet extensively tested,
-   and extending packages across repositories imposes inter-repo
-   dependencies, which may be hard to manage. Use this feature at your
-   own risk, but let us know if you have a use case for it.
@@ -1,13 +1,13 @@
 sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.5.0
+sphinx_design==0.6.0
 sphinx-rtd-theme==2.0.0
 python-levenshtein==0.25.1
 docutils==0.20.1
-pygments==2.17.2
+pygments==2.18.0
-urllib3==2.2.1
+urllib3==2.2.2
-pytest==8.2.0
+pytest==8.2.2
 isort==5.13.2
 black==24.4.2
-flake8==7.0.0
+flake8==7.1.0
-mypy==1.10.0
+mypy==1.10.1
2 changes: lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
+* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

 astunparse
 ----------------
12 changes: lib/spack/external/archspec/cpu/detect.py (vendored)
@@ -47,11 +47,7 @@ def decorator(factory):


 def partial_uarch(
-    name: str = "",
-    vendor: str = "",
-    features: Optional[Set[str]] = None,
-    generation: int = 0,
-    cpu_part: str = "",
+    name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
 ) -> Microarchitecture:
     """Construct a partial microarchitecture, from information gathered during system scan."""
     return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
         features=features or set(),
         compilers={},
         generation=generation,
-        cpu_part=cpu_part,
     )


@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
         return partial_uarch(
             vendor=_canonicalize_aarch64_vendor(data),
             features=_feature_set(data, key="Features"),
-            cpu_part=data.get("CPU part", ""),
         )

     if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
     generic_candidates = [c for c in candidates if c.vendor == "generic"]
     best_generic = max(generic_candidates, key=sorting_fn)

-    # Relevant for AArch64. Filter on "cpu_part" if we have any match
-    if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
-        candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
-
     # Filter the candidates to be descendant of the best generic candidate.
     # This is to avoid that the lack of a niche feature that can be disabled
     # from e.g. BIOS prevents detection of a reasonably performant architecture
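For context, the ``cpu_part`` filter removed in the last hunk narrows candidate microarchitectures by the AArch64 ``CPU part`` register value. A self-contained Python sketch of the same filtering logic (the ``Candidate`` type and values stand in for archspec's real objects and are purely illustrative):

.. code-block:: python

   from typing import List, NamedTuple

   class Candidate(NamedTuple):
       name: str
       cpu_part: str  # AArch64 "CPU part" identifier, e.g. "0xd40"

   def filter_by_cpu_part(detected_part: str, candidates: List[Candidate]) -> List[Candidate]:
       # Keep only exact cpu_part matches, but only if at least one candidate matches;
       # otherwise leave the candidate list untouched, as the removed code did.
       if detected_part and any(c.cpu_part == detected_part for c in candidates):
           return [c for c in candidates if c.cpu_part == detected_part]
       return candidates

   candidates = [Candidate("neoverse_v1", "0xd40"), Candidate("neoverse_n1", "0xd0c")]
   print(filter_by_cpu_part("0xd40", candidates))  # keeps only neoverse_v1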
@@ -2,7 +2,9 @@
 # Archspec Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Types and functions to manage information on CPU microarchitectures."""
+"""Types and functions to manage information
+on CPU microarchitectures.
+"""
 import functools
 import platform
 import re
@@ -63,31 +65,23 @@ class Microarchitecture:
           passed in as argument above.
         * versions: versions that support this micro-architecture.

-        generation (int): generation of the micro-architecture, if relevant.
-        cpu_part (str): cpu part of the architecture, if relevant.
+        generation (int): generation of the micro-architecture, if
+            relevant.
     """

-    # pylint: disable=too-many-arguments,too-many-instance-attributes
+    # pylint: disable=too-many-arguments
     #: Aliases for micro-architecture's features
     feature_aliases = FEATURE_ALIASES

-    def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
+    def __init__(self, name, parents, vendor, features, compilers, generation=0):
         self.name = name
         self.parents = parents
         self.vendor = vendor
         self.features = features
         self.compilers = compilers
-        # Only relevant for PowerPC
         self.generation = generation
-        # Only relevant for AArch64
-        self.cpu_part = cpu_part
-
-        # Cache the "ancestor" computation
+        # Cache the ancestor computation
         self._ancestors = None
-        # Cache the "generic" computation
-        self._generic = None
-        # Cache the "family" computation
-        self._family = None

     @property
     def ancestors(self):
@@ -117,12 +111,8 @@ def __eq__(self, other):
             and self.parents == other.parents  # avoid ancestors here
             and self.compilers == other.compilers
             and self.generation == other.generation
-            and self.cpu_part == other.cpu_part
         )

-    def __hash__(self):
-        return hash(self.name)
-
     @coerce_target_names
     def __ne__(self, other):
         return not self == other
@@ -153,8 +143,7 @@ def __repr__(self):
         cls_name = self.__class__.__name__
         fmt = (
             cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
-            "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
-            "cpu_part={0.cpu_part!r})"
+            "{0.features!r}, {0.compilers!r}, {0.generation!r})"
         )
         return fmt.format(self)
@@ -179,22 +168,18 @@ def __contains__(self, feature):
     @property
     def family(self):
         """Returns the architecture family a given target belongs to"""
-        if self._family is None:
-            roots = [x for x in [self] + self.ancestors if not x.ancestors]
-            msg = "a target is expected to belong to just one architecture family"
-            msg += f"[found {', '.join(str(x) for x in roots)}]"
-            assert len(roots) == 1, msg
-            self._family = roots.pop()
-
-        return self._family
+        roots = [x for x in [self] + self.ancestors if not x.ancestors]
+        msg = "a target is expected to belong to just one architecture family"
+        msg += f"[found {', '.join(str(x) for x in roots)}]"
+        assert len(roots) == 1, msg
+        return roots.pop()

     @property
     def generic(self):
         """Returns the best generic architecture that is compatible with self"""
-        if self._generic is None:
-            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
-            self._generic = max(generics, key=lambda x: len(x.ancestors))
-        return self._generic
+        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+        return max(generics, key=lambda x: len(x.ancestors))

     def to_dict(self):
         """Returns a dictionary representation of this object."""
@@ -205,7 +190,6 @@ def to_dict(self):
             "generation": self.generation,
             "parents": [str(x) for x in self.parents],
             "compilers": self.compilers,
-            "cpupart": self.cpu_part,
         }

     @staticmethod
@@ -218,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
             features=set(data["features"]),
             compilers=data.get("compilers", {}),
             generation=data.get("generation", 0),
-            cpu_part=data.get("cpupart", ""),
         )

     def optimization_flags(self, compiler, version):
@@ -377,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
     features = set(values["features"])
     compilers = values.get("compilers", {})
     generation = values.get("generation", 0)
-    cpu_part = values.get("cpupart", "")

-    targets[name] = Microarchitecture(
-        name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
-    )
+    targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)

     known_targets = {}
     data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
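The left-hand side of the ``family``/``generic`` hunk above is a standard memoize-in-an-attribute pattern. A minimal standalone sketch of the same idea (the class and names here are illustrative, not archspec's API):

.. code-block:: python

   # Cache an expensive derived property on first access, as on the minus side.
   class Node:
       def __init__(self, name, parents):
           self.name = name
           self.parents = parents
           self._family = None  # cache for the root-ancestor computation

       @property
       def family(self):
           if self._family is None:
               # Walk to the root once; subsequent lookups hit the cache.
               node = self
               while node.parents:
                   node = node.parents[0]
               self._family = node
           return self._family

   root = Node("aarch64", [])
   leaf = Node("neoverse_v2", [Node("neoverse_n1", [root])])
   print(leaf.family.name)  # aarch64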
@@ -1482,6 +1482,7 @@
         "cldemote",
         "movdir64b",
         "movdiri",
+        "pdcm",
         "serialize",
         "waitpkg"
       ],
@@ -2224,96 +2225,14 @@
       ],
       "nvhpc": [
         {
-          "versions": "21.11:23.8",
+          "versions": "21.11:",
           "name": "zen3",
           "flags": "-tp {name}",
-          "warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
-        },
-        {
-          "versions": "23.9:",
-          "flags": "-tp {name}"
+          "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
         }
       ]
     }
   },
-  "zen5": {
-    "from": ["zen4"],
-    "vendor": "AuthenticAMD",
-    "features": [
-      "abm",
-      "aes",
-      "avx",
-      "avx2",
-      "avx512_bf16",
-      "avx512_bitalg",
-      "avx512bw",
-      "avx512cd",
-      "avx512dq",
-      "avx512f",
-      "avx512ifma",
-      "avx512vbmi",
-      "avx512_vbmi2",
-      "avx512vl",
-      "avx512_vnni",
-      "avx512_vp2intersect",
-      "avx512_vpopcntdq",
-      "avx_vnni",
-      "bmi1",
-      "bmi2",
-      "clflushopt",
-      "clwb",
-      "clzero",
-      "cppc",
-      "cx16",
-      "f16c",
-      "flush_l1d",
-      "fma",
-      "fsgsbase",
-      "gfni",
-      "ibrs_enhanced",
-      "mmx",
-      "movbe",
-      "movdir64b",
-      "movdiri",
-      "pclmulqdq",
-      "popcnt",
-      "rdseed",
-      "sse",
-      "sse2",
-      "sse4_1",
-      "sse4_2",
-      "sse4a",
-      "ssse3",
-      "tsc_adjust",
-      "vaes",
-      "vpclmulqdq",
-      "xsavec",
-      "xsaveopt"
-    ],
-    "compilers": {
-      "gcc": [
-        {
-          "versions": "14.1:",
-          "name": "znver5",
-          "flags": "-march={name} -mtune={name}"
-        }
-      ],
-      "aocc": [
-        {
-          "versions": "5.0:",
-          "name": "znver5",
-          "flags": "-march={name} -mtune={name}"
-        }
-      ],
-      "clang": [
-        {
-          "versions": "19.1:",
-          "name": "znver5",
-          "flags": "-march={name} -mtune={name}"
-        }
-      ]
-    }
-  },
   "ppc64": {
     "from": [],
     "vendor": "generic",
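To see how entries like the ones above are consumed, archspec's Python API turns a target's ``compilers`` table into flags; a hedged sketch (the target and compiler version are examples, and the printed flags depend on the JSON shipped with your archspec):

.. code-block:: python

   import archspec.cpu

   # Look up a named microarchitecture and ask for compiler-specific flags;
   # optimization_flags(compiler, version) is the method seen in the diff above.
   target = archspec.cpu.TARGETS["zen3"]
   print(target.optimization_flags("gcc", "12.2.0"))  # e.g. "-march=znver3 -mtune=znver3"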
@@ -2792,8 +2711,7 @@
           "flags": "-mcpu=thunderx2t99"
         }
       ]
-    },
-    "cpupart": "0x0af"
+    }
   },
   "a64fx": {
     "from": ["armv8.2a"],
@@ -2861,8 +2779,7 @@
           "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
         }
       ]
-    },
-    "cpupart": "0x001"
+    }
   },
   "cortex_a72": {
     "from": ["aarch64"],
@@ -2899,8 +2816,7 @@
           "flags" : "-mcpu=cortex-a72"
         }
       ]
-    },
-    "cpupart": "0xd08"
+    }
   },
   "neoverse_n1": {
     "from": ["cortex_a72", "armv8.2a"],
@@ -2921,7 +2837,8 @@
       "asimdrdm",
       "lrcpc",
       "dcpop",
-      "asimddp"
+      "asimddp",
+      "ssbs"
     ],
     "compilers" : {
       "gcc": [
@@ -2985,8 +2902,7 @@
           "flags": "-tp {name}"
         }
       ]
-    },
-    "cpupart": "0xd0c"
+    }
   },
   "neoverse_v1": {
     "from": ["neoverse_n1", "armv8.4a"],
@@ -3010,6 +2926,8 @@
       "lrcpc",
       "dcpop",
       "sha3",
+      "sm3",
+      "sm4",
       "asimddp",
       "sha512",
       "sve",
@@ -3018,6 +2936,7 @@
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3085,7 +3004,7 @@
         },
         {
           "versions": "11:",
-          "flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
+          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
         },
         {
           "versions": "12:",
@@ -3109,8 +3028,7 @@
           "flags": "-tp {name}"
         }
       ]
-    },
-    "cpupart": "0xd40"
+    }
   },
   "neoverse_v2": {
     "from": ["neoverse_n1", "armv9.0a"],
@@ -3134,22 +3052,32 @@
       "lrcpc",
       "dcpop",
       "sha3",
+      "sm3",
+      "sm4",
       "asimddp",
       "sha512",
       "sve",
       "asimdfhm",
+      "dit",
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "sb",
       "dcpodp",
       "sve2",
+      "sveaes",
+      "svepmull",
+      "svebitperm",
+      "svesha3",
+      "svesm4",
       "flagm2",
       "frint",
       "svei8mm",
       "svebf16",
       "i8mm",
-      "bf16"
+      "bf16",
+      "dgh"
     ],
     "compilers" : {
       "gcc": [
@@ -3174,19 +3102,15 @@
           "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
         },
         {
-          "versions": "10.0:11.3.99",
+          "versions": "10.0:11.99",
           "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
         },
-        {
-          "versions": "11.4:11.99",
-          "flags" : "-mcpu=neoverse-v2"
-        },
         {
-          "versions": "12.0:12.2.99",
+          "versions": "12.0:12.99",
           "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
         },
         {
-          "versions": "12.3:",
+          "versions": "13.0:",
           "flags" : "-mcpu=neoverse-v2"
         }
       ],
@@ -3221,112 +3145,7 @@
           "flags": "-tp {name}"
         }
       ]
-    },
-    "cpupart": "0xd4f"
-  },
-  "neoverse_n2": {
-    "from": ["neoverse_n1", "armv9.0a"],
-    "vendor": "ARM",
-    "features": [
-      "fp",
-      "asimd",
-      "evtstrm",
-      "aes",
-      "pmull",
-      "sha1",
-      "sha2",
-      "crc32",
-      "atomics",
-      "fphp",
-      "asimdhp",
-      "cpuid",
-      "asimdrdm",
-      "jscvt",
-      "fcma",
-      "lrcpc",
-      "dcpop",
-      "sha3",
-      "asimddp",
-      "sha512",
-      "sve",
-      "asimdfhm",
-      "uscat",
-      "ilrcpc",
-      "flagm",
-      "sb",
-      "dcpodp",
-      "sve2",
-      "flagm2",
-      "frint",
-      "svei8mm",
-      "svebf16",
-      "i8mm",
-      "bf16"
-    ],
-    "compilers" : {
-      "gcc": [
-        {
-          "versions": "4.8:5.99",
-          "flags": "-march=armv8-a"
-        },
-        {
-          "versions": "6:6.99",
-          "flags" : "-march=armv8.1-a"
-        },
-        {
-          "versions": "7.0:7.99",
-          "flags" : "-march=armv8.2-a -mtune=cortex-a72"
-        },
-        {
-          "versions": "8.0:8.99",
-          "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
-        },
-        {
-          "versions": "9.0:9.99",
-          "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
-        },
-        {
-          "versions": "10.0:10.99",
-          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
-        },
-        {
-          "versions": "11.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "clang" : [
-        {
-          "versions": "9.0:10.99",
-          "flags" : "-march=armv8.5-a+sve"
-        },
-        {
-          "versions": "11.0:13.99",
-          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
-        },
-        {
-          "versions": "14.0:15.99",
-          "flags" : "-march=armv9-a+i8mm+bf16"
-        },
-        {
-          "versions": "16.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "arm" : [
-        {
-          "versions": "23.04.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "nvhpc" : [
-        {
-          "versions": "23.3:",
-          "name": "neoverse-n1",
-          "flags": "-tp {name}"
-        }
-      ]
-    },
-    "cpupart": "0xd49"
+    }
   },
   "m1": {
     "from": ["armv8.4a"],
@@ -3392,8 +3211,7 @@
           "flags" : "-mcpu=apple-m1"
         }
       ]
-    },
-    "cpupart": "0x022"
+    }
   },
   "m2": {
     "from": ["m1", "armv8.5a"],
@@ -3471,8 +3289,7 @@
           "flags" : "-mcpu=apple-m2"
         }
       ]
-    },
-    "cpupart": "0x032"
+    }
   },
   "arm": {
     "from": [],
@@ -52,9 +52,6 @@
           }
         }
       }
-      },
-      "cpupart": {
-        "type": "string"
       }
     },
     "required": [
@@ -110,4 +107,4 @@
   "additionalProperties": false
     }
   }
 }
@@ -766,7 +766,6 @@ def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
-    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -789,8 +788,6 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only

@@ -798,8 +795,6 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    if allow_broken_symlinks and sys.platform == "win32":
-        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -872,16 +867,14 @@ def escaped_path(path):
             copy_mode(s, d)

     for target, d, s in links:
-        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        symlink(target, d)
         if _permissions:
             set_install_permissions(d)
             copy_mode(s, d)


 @system_path_filter
-def install_tree(
-    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
-):
+def install_tree(src, dest, symlinks=True, ignore=None):
     """Recursively install an entire directory tree rooted at *src*.

     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -892,21 +885,12 @@ def install_tree(
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception.

     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(
-        src,
-        dest,
-        symlinks=symlinks,
-        allow_broken_symlinks=allow_broken_symlinks,
-        ignore=ignore,
-        _permissions=True,
-    )
+    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)


 @system_path_filter
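Both `copy_tree` variants preserve symlinks and essentially re-target links that point inside the source tree so they point inside the destination instead. A standalone illustration of that re-targeting, using plain `os` calls rather than Spack code (POSIX):

```python
# Illustration of symlink re-targeting during a tree copy (POSIX only).
import os
import tempfile

src = tempfile.mkdtemp(prefix="src-")
dst = tempfile.mkdtemp(prefix="dst-")
open(os.path.join(src, "data.txt"), "w").close()
os.symlink(os.path.join(src, "data.txt"), os.path.join(src, "link.txt"))

# A link whose target lies within src is rewritten to point within dst:
target = os.readlink(os.path.join(src, "link.txt"))
retargeted = target.replace(src, dst, 1)
print(retargeted)  # .../dst-XXXX/data.txt
```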
@@ -8,6 +8,7 @@
 import subprocess
 import sys
 import tempfile
+from typing import Union

 from llnl.util import lang, tty

@@ -16,92 +17,66 @@
 if sys.platform == "win32":
     from win32file import CreateHardLink

-is_windows = sys.platform == "win32"
-
-
-def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
-    """
-    Create a link.
-
-    On non-Windows and Windows with System Administrator
-    privleges this will be a normal symbolic link via
-    os.symlink.
-
-    On Windows without privledges the link will be a
-    junction for a directory and a hardlink for a file.
-    On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the
-    same or different volume (drive letter) or even to
-    a remote file or directory (using UNC in its path).
-    Need System Administrator privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive
-    letter) only. Every file (file's data) has at least 1
-    hard link (file's name). But when this method creates
-    a new hard link there will be 2. Deleting all hard
-    links effectively deletes the file. Don't need System
-    Administrator privileges.
-
-    Junction: A link to a directory on the same or different
-    volume (drive letter) but not to a remote directory. Don't
-    need System Administrator privileges.
-
-    Parameters:
-        source_path (str): The real file or directory that the link points to.
-            Must be absolute OR relative to the link.
-        link_path (str): The path where the link will exist.
-        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
-            doesn't exist. This will still raise an exception on Windows.
-    """
-    source_path = os.path.normpath(source_path)
+
+def _windows_symlink(
+    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
+):
+    """On Windows with System Administrator privileges this will be a normal symbolic link via
+    os.symlink. On Windows without privledges the link will be a junction for a directory and a
+    hardlink for a file. On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
+    even to a remote file or directory (using UNC in its path). Need System Administrator
+    privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
+    has at least 1 hard link (file's name). But when this method creates a new hard link there will
+    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
+    privileges.
+
+    Junction: A link to a directory on the same or different volume (drive letter) but not to a
+    remote directory. Don't need System Administrator privileges."""
+    source_path = os.path.normpath(src)
     win_source_path = source_path
-    link_path = os.path.normpath(link_path)
-
-    # Never allow broken links on Windows.
-    if sys.platform == "win32" and allow_broken_symlinks:
-        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
-
-    if not allow_broken_symlinks:
-        # Perform basic checks to make sure symlinking will succeed
-        if os.path.lexists(link_path):
-            raise AlreadyExistsError(
-                f"Link path ({link_path}) already exists. Cannot create link."
-            )
-
-        if not os.path.exists(source_path):
-            if os.path.isabs(source_path) and not allow_broken_symlinks:
-                # An absolute source path that does not exist will result in a broken link.
-                raise SymlinkError(
-                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
-                    f"link would be broken so not making link."
-                )
-            else:
-                # os.symlink can create a link when the given source path is relative to
-                # the link path. Emulate this behavior and check to see if the source exists
-                # relative to the link path ahead of link creation to prevent broken
-                # links from being made.
-                link_parent_dir = os.path.dirname(link_path)
-                relative_path = os.path.join(link_parent_dir, source_path)
-                if os.path.exists(relative_path):
-                    # In order to work on windows, the source path needs to be modified to be
-                    # relative because hardlink/junction dont resolve relative paths the same
-                    # way as os.symlink. This is ignored on other operating systems.
-                    win_source_path = relative_path
-                elif not allow_broken_symlinks:
-                    raise SymlinkError(
-                        f"The source path ({source_path}) is not relative to the link path "
-                        f"({link_path}). Resulting link would be broken so not making link."
-                    )
+    link_path = os.path.normpath(dst)
+
+    # Perform basic checks to make sure symlinking will succeed
+    if os.path.lexists(link_path):
+        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
+
+    if not os.path.exists(source_path):
+        if os.path.isabs(source_path):
+            # An absolute source path that does not exist will result in a broken link.
+            raise SymlinkError(
+                f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                f"link would be broken so not making link."
+            )
+        else:
+            # os.symlink can create a link when the given source path is relative to
+            # the link path. Emulate this behavior and check to see if the source exists
+            # relative to the link path ahead of link creation to prevent broken
+            # links from being made.
+            link_parent_dir = os.path.dirname(link_path)
+            relative_path = os.path.join(link_parent_dir, source_path)
+            if os.path.exists(relative_path):
+                # In order to work on windows, the source path needs to be modified to be
+                # relative because hardlink/junction dont resolve relative paths the same
+                # way as os.symlink. This is ignored on other operating systems.
+                win_source_path = relative_path
+            else:
+                raise SymlinkError(
+                    f"The source path ({source_path}) is not relative to the link path "
+                    f"({link_path}). Resulting link would be broken so not making link."
                )

     # Create the symlink
-    if sys.platform == "win32" and not _windows_can_symlink():
+    if not _windows_can_symlink():
         _windows_create_link(win_source_path, link_path)
     else:
         os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


-def islink(path: str) -> bool:
+def _windows_islink(path: str) -> bool:
     """Override os.islink to give correct answer for spack logic.

     For Non-Windows: a link can be determined with the os.path.islink method.
@@ -269,7 +244,7 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)


-def readlink(path: str, *, dir_fd=None):
+def _windows_readlink(path: str, *, dir_fd=None):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
@@ -338,6 +313,16 @@ def resolve_link_target_relative_to_the_link(link):
     return os.path.join(link_dir, target)


+if sys.platform == "win32":
+    symlink = _windows_symlink
+    readlink = _windows_readlink
+    islink = _windows_islink
+else:
+    symlink = os.symlink
+    readlink = os.readlink
+    islink = os.path.islink
+
+
 class SymlinkError(RuntimeError):
     """Exception class for errors raised while creating symlinks,
     junctions and hard links
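The relative-source branch in `_windows_symlink` exists because `os.symlink` resolves a relative source against the link's own directory, not the current working directory; the code probes that resolution up front so it can refuse to create a broken link. A standalone demonstration of the behavior being emulated (POSIX, plain `os` calls):

```python
# Why the relative-source check exists: os.symlink resolves a relative
# source against the *link's* directory, not the current working directory.
import os
import tempfile

d = tempfile.mkdtemp()
os.makedirs(os.path.join(d, "sub"))
open(os.path.join(d, "sub", "real.txt"), "w").close()

# "real.txt" is relative to the link's parent dir, so this link is valid:
os.symlink("real.txt", os.path.join(d, "sub", "link.txt"))
print(os.path.exists(os.path.join(d, "sub", "link.txt")))  # True

# The guard in _windows_symlink performs this resolution ahead of time:
link_path = os.path.join(d, "sub", "link.txt")
resolved = os.path.join(os.path.dirname(link_path), "real.txt")
print(os.path.exists(resolved))  # True -> the link would not be broken
```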
@@ -18,10 +18,9 @@
 import threading
 import traceback
 from contextlib import contextmanager
-from multiprocessing.connection import Connection
 from threading import Thread
 from types import ModuleType
-from typing import Callable, Optional
+from typing import Optional

 import llnl.util.tty as tty

@@ -34,8 +33,23 @@
     pass


+esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
+# Ansi Control Sequence Introducers (CSI) are a well-defined format
+# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
+# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
+csi_pre = f"{esc}{lbracket}"
+csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
+ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
+# General ansi escape sequences have well-defined prefixes,
+# but content and suffixes are less reliable.
+# Conservatively assume they end with either "<ESC>\" or "<BELL>",
+# with no intervening "<ESC>"/"<BELL>" keys or newlines
+esc_pre = f"{esc}[@-_]"
+esc_content = f"[^{esc}{bell}{newline}]"
+esc_post = f"(?:{esc}{bslash}|{bell})"
+ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
 # Use this to strip escape sequences
-_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
+_escape = re.compile(f"{ansi_csi}|{ansi_esc}")

 # control characters for enabling/disabling echo
 #
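The new `_escape` pattern is assembled from the ECMA-48 CSI grammar plus a conservative rule for other escape sequences. Rebuilding the same pieces outside the module and applying them to a sample string shows what gets stripped:

```python
# Reconstruction of the escape-stripping regex added above, applied to a sample.
import re

esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")

# A color (CSI) sequence and an OSC title sequence, both removed:
colored = "\x1b[1;31merror:\x1b[0m build failed \x1b]0;title\x07"
print(_escape.sub("", colored))  # -> "error: build failed "
```

Unlike the old hard-coded alternation, this covers any CSI final byte, not just `m`.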
@@ -330,6 +344,49 @@ def close(self):
         self.file.close()


+class MultiProcessFd:
+    """Return an object which stores a file descriptor and can be passed as an
+    argument to a function run with ``multiprocessing.Process``, such that
+    the file descriptor is available in the subprocess."""
+
+    def __init__(self, fd):
+        self._connection = None
+        self._fd = None
+        if sys.version_info >= (3, 8):
+            self._connection = multiprocessing.connection.Connection(fd)
+        else:
+            self._fd = fd
+
+    @property
+    def fd(self):
+        if self._connection:
+            return self._connection._handle
+        else:
+            return self._fd
+
+    def close(self):
+        if self._connection:
+            self._connection.close()
+        else:
+            os.close(self._fd)
+
+
+def close_connection_and_file(multiprocess_fd, file):
+    # MultiprocessFd is intended to transmit a FD
+    # to a child process, this FD is then opened to a Python File object
+    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
+    # multiprocessing.connection.Connection; Connection closes the FD
+    # when it is deleted, and prints a warning about duplicate closure if
+    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
+    # simple FD; closing the FD here appears to conflict with
+    # closure of the File object (in < 3.8 that is). Therefore this needs
+    # to choose whether to close the File or the Connection.
+    if sys.version_info >= (3, 8):
+        multiprocess_fd.close()
+    else:
+        file.close()
+
+
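The `MultiProcessFd` helper above wraps either a raw descriptor or a `multiprocessing.connection.Connection`, and `.fd` yields a usable descriptor on both paths. A hedged usage sketch, assuming a POSIX host with the "fork" start method and the branch above (where the class lives in `llnl.util.tty.log`):

```python
# Usage sketch for MultiProcessFd (assumes fork start method; see the
# close_connection_and_file comment above for the double-close caveat).
import multiprocessing
import os

from llnl.util.tty.log import MultiProcessFd

def child(mp_fd):
    # .fd resolves to a descriptor on both the Connection and raw-fd paths
    with os.fdopen(mp_fd.fd) as f:
        print("child read:", f.readline().strip())

if __name__ == "__main__":
    r, w = os.pipe()
    os.write(w, b"hello from parent\n")
    os.close(w)
    p = multiprocessing.Process(target=child, args=(MultiProcessFd(r),))
    p.start()
    p.join()
```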
 @contextmanager
 def replace_environment(env):
     """Replace the current environment (`os.environ`) with `env`.
@@ -487,20 +544,22 @@ def __enter__(self):
         # forcing debug output.
         self._saved_debug = tty._debug

-        # Pipe for redirecting output to logger
-        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)
+        # OS-level pipe for redirecting output to logger
+        read_fd, write_fd = os.pipe()
+
+        read_multiprocess_fd = MultiProcessFd(read_fd)

-        # Pipe for communication back from the daemon
+        # Multiprocessing pipe for communication back from the daemon
         # Currently only used to save echo value between uses
-        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)
+        self.parent_pipe, child_pipe = multiprocessing.Pipe()

         # Sets a daemon that writes to file what it reads from a pipe
         try:
             # need to pass this b/c multiprocessing closes stdin in child.
-            input_fd = None
+            input_multiprocess_fd = None
             try:
                 if sys.stdin.isatty():
-                    input_fd = Connection(os.dup(sys.stdin.fileno()))
+                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
             except BaseException:
                 # just don't forward input if this fails
                 pass
@@ -509,9 +568,9 @@ def __enter__(self):
             self.process = multiprocessing.Process(
                 target=_writer_daemon,
                 args=(
-                    input_fd,
-                    read_fd,
-                    self.write_fd,
+                    input_multiprocess_fd,
+                    read_multiprocess_fd,
+                    write_fd,
                     self.echo,
                     self.log_file,
                     child_pipe,
@@ -522,9 +581,9 @@ def __enter__(self):
             self.process.start()

         finally:
-            if input_fd:
-                input_fd.close()
-            read_fd.close()
+            if input_multiprocess_fd:
+                input_multiprocess_fd.close()
+            read_multiprocess_fd.close()

         # Flush immediately before redirecting so that anything buffered
         # goes to the original stream
@@ -542,9 +601,9 @@ def __enter__(self):
             self._saved_stderr = os.dup(sys.stderr.fileno())

             # redirect to the pipe we created above
-            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
-            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
-            self.write_fd.close()
+            os.dup2(write_fd, sys.stdout.fileno())
+            os.dup2(write_fd, sys.stderr.fileno())
+            os.close(write_fd)

         else:
             # Handle I/O the Python way. This won't redirect lower-level
@@ -557,7 +616,7 @@ def __enter__(self):
             self._saved_stderr = sys.stderr

             # create a file object for the pipe; redirect to it.
-            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
+            pipe_fd_out = os.fdopen(write_fd, "w")
             sys.stdout = pipe_fd_out
             sys.stderr = pipe_fd_out

@@ -593,7 +652,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             sys.stdout = self._saved_stdout
             sys.stderr = self._saved_stderr
-            self.write_fd.close()

         # print log contents in parent if needed.
         if self.log_file.write_in_parent:
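Whichever object carries the write end (a `multiprocessing` Connection on one side of this diff, a bare `os.pipe()` descriptor on the other), the redirection itself is the same `os.dup2` trick. In isolation:

```python
# The core redirection trick used by log_output.__enter__: point the
# process-level stdout descriptor at the write end of a pipe.
import os
import sys

read_fd, write_fd = os.pipe()

saved_stdout = os.dup(sys.stdout.fileno())  # remember the real stdout
sys.stdout.flush()                          # don't lose buffered text
os.dup2(write_fd, sys.stdout.fileno())      # fd 1 now feeds the pipe
os.close(write_fd)

print("captured line")                      # goes into the pipe
sys.stdout.flush()

os.dup2(saved_stdout, sys.stdout.fileno())  # restore the real stdout
os.close(saved_stdout)
print("back to normal:", os.read(read_fd, 1024).decode().strip())
```

Because `dup2` operates on file descriptors, this also captures output from subprocesses and C extensions, which reassigning `sys.stdout` alone would miss.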
@@ -807,14 +865,14 @@ def force_echo(self):


 def _writer_daemon(
-    stdin_fd: Optional[Connection],
-    read_fd: Connection,
-    write_fd: Connection,
-    echo: bool,
-    log_file_wrapper: FileWrapper,
-    control_fd: Connection,
-    filter_fn: Optional[Callable[[str], str]],
-) -> None:
+    stdin_multiprocess_fd,
+    read_multiprocess_fd,
+    write_fd,
+    echo,
+    log_file_wrapper,
+    control_pipe,
+    filter_fn,
+):
     """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

     The daemon receives output from the parent process and writes it both
@@ -851,37 +909,43 @@ def _writer_daemon(
     ``StringIO`` in the parent. This is mainly for testing.

     Arguments:
-        stdin_fd: optional input from the terminal
-        read_fd: pipe for reading from parent's redirected stdout
-        echo: initial echo setting -- controlled by user and preserved across multiple writer
-            daemons
-        log_file_wrapper: file to log all output
-        control_pipe: multiprocessing pipe on which to send control information to the parent
-        filter_fn: optional function to filter each line of output
+        stdin_multiprocess_fd (int): input from the terminal
+        read_multiprocess_fd (int): pipe for reading from parent's redirected
+            stdout
+        echo (bool): initial echo setting -- controlled by user and
+            preserved across multiple writer daemons
+        log_file_wrapper (FileWrapper): file to log all output
+        control_pipe (Pipe): multiprocessing pipe on which to send control
+            information to the parent
+        filter_fn (callable, optional): function to filter each line of output

     """
-    # This process depends on closing all instances of write_pipe to terminate the reading loop
-    write_fd.close()
+    # If this process was forked, then it will inherit file descriptors from
+    # the parent process. This process depends on closing all instances of
+    # write_fd to terminate the reading loop, so we close the file descriptor
+    # here. Forking is the process spawning method everywhere except Mac OS
+    # for Python >= 3.8 and on Windows
+    if sys.version_info < (3, 8) or sys.platform != "darwin":
+        os.close(write_fd)

     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
     #    that prevents unbuffered text I/O.
     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    # 3. closefd=False because Connection has "ownership"
-    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
+    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")

-    if stdin_fd:
-        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
+    if stdin_multiprocess_fd:
+        stdin = os.fdopen(stdin_multiprocess_fd.fd)
     else:
-        stdin_file = None
+        stdin = None

     # list of streams to select from
-    istreams = [read_file, stdin_file] if stdin_file else [read_file]
+    istreams = [in_pipe, stdin] if stdin else [in_pipe]
     force_echo = False  # parent can force echo for certain output

     log_file = log_file_wrapper.unwrap()

     try:
-        with keyboard_input(stdin_file) as kb:
+        with keyboard_input(stdin) as kb:
             while True:
                 # fix the terminal settings if we recently came to
                 # the foreground
@@ -894,12 +958,12 @@ def _writer_daemon(
                 # Allow user to toggle echo with 'v' key.
                 # Currently ignores other chars.
                 # only read stdin if we're in the foreground
-                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
+                if stdin in rlist and not _is_background_tty(stdin):
                     # it's possible to be backgrounded between the above
                     # check and the read, so we ignore SIGTTIN here.
                     with ignore_signal(signal.SIGTTIN):
                         try:
-                            if stdin_file.read(1) == "v":
+                            if stdin.read(1) == "v":
                                 echo = not echo
                         except IOError as e:
                             # If SIGTTIN is ignored, the system gives EIO
@@ -908,13 +972,13 @@ def _writer_daemon(
                             if e.errno != errno.EIO:
                                 raise

-                if read_file in rlist:
+                if in_pipe in rlist:
                     line_count = 0
                     try:
                         while line_count < 100:
                             # Handle output from the calling process.
                             try:
-                                line = _retry(read_file.readline)()
+                                line = _retry(in_pipe.readline)()
                             except UnicodeDecodeError:
                                 # installs like --test=root gpgme produce non-UTF8 logs
                                 line = "<line lost: output was not encoded as UTF-8>\n"
@@ -943,7 +1007,7 @@ def _writer_daemon(
                             if xoff in controls:
                                 force_echo = False

-                            if not _input_available(read_file):
+                            if not _input_available(in_pipe):
                                 break
                     finally:
                         if line_count > 0:
@@ -958,14 +1022,14 @@ def _writer_daemon(
     finally:
         # send written data back to parent if we used a StringIO
         if isinstance(log_file, io.StringIO):
-            control_fd.send(log_file.getvalue())
+            control_pipe.send(log_file.getvalue())
         log_file_wrapper.close()
-        read_fd.close()
-        if stdin_fd:
-            stdin_fd.close()
+        close_connection_and_file(read_multiprocess_fd, in_pipe)
+        if stdin_multiprocess_fd:
+            close_connection_and_file(stdin_multiprocess_fd, stdin)

         # send echo value back to the parent so it can be preserved.
-        control_fd.send(echo)
+        control_pipe.send(echo)


 def _retry(function):
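The write-end bookkeeping in `_writer_daemon` matters because a pipe reader only sees EOF once the last open write descriptor is closed: a forked child inherits an extra copy that must be closed in the child, while a spawned child never had one. A minimal demonstration of that EOF rule:

```python
# Why every copy of the write end must be closed: a reader only sees EOF
# once the last writer descriptor is gone.
import os

read_fd, write_fd = os.pipe()
dup_of_write = os.dup(write_fd)  # stands in for a fork-inherited copy

os.write(write_fd, b"output\n")
os.close(write_fd)               # the "parent" copy is closed...

# ...but the duplicated copy still holds the pipe open, so a blocking
# reader would hang after draining it. Close it to deliver EOF:
os.close(dup_of_write)

print(os.read(read_fd, 1024))    # b"output\n"
print(os.read(read_fd, 1024))    # b"" -> EOF, so the read loop can exit
```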
@@ -4,21 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.6.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__

-#: The current Package API version implemented by this version of Spack. The Package API defines
-#: the Python interface for packages as well as the layout of package repositories. The minor
-#: version is incremented when the package API is extended in a backwards-compatible way. The major
-#: version is incremented upon breaking changes. This version is changed independently from the
-#: Spack version.
-package_api_version = (1, 0)
-
-#: The minimum Package API version that this version of Spack is compatible with. This should
-#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
-#: compatibility with vX.0.
-min_package_api_version = (1, 0)
-

 def __try_int(v):
     try:
@@ -31,4 +19,4 @@ def __try_int(v):
 spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])


-__all__ = ["spack_version_info", "spack_version", "package_api_version", "min_package_api_version"]
+__all__ = ["spack_version_info", "spack_version"]
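The surviving `__try_int` helper is what turns the version string into a mixed tuple: numeric components become ints, while suffixes like `dev0` stay strings. A sketch of its effect, with the helper re-declared here for illustration:

```python
# What the version parsing above produces (helper re-declared for the sketch).
def try_int(v):
    try:
        return int(v)
    except ValueError:
        return v

version = "0.23.0.dev0"
print(tuple(try_int(v) for v in version.split(".")))  # (0, 23, 0, 'dev0')
```

The mixed tuple still compares sensibly for the common case of numeric prefixes, which is all `spack_version_info` is used for.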
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+    github_pull_commits_re = (
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue

-            if re.match(github_patch_url_re, patch.url):
+            if re.match(github_pull_commits_re, patch.url):
+                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                errors.append(
+                    error_cls(
+                        f"patch URL in package {pkg_cls.name} "
+                        + "must not be a pull request commit; "
+                        + f"instead use {url}",
+                        [patch.url],
+                    )
+                )
+            elif re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with {1}".format(
-                                pkg_cls.name, full_index_arg
-                            ),
+                            f"patch URL in package {pkg_cls.name} "
+                            + f"must end with {full_index_arg}",
                             [patch.url],
                         )
                     )
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with .diff".format(
-                                pkg_cls.name
-                            ),
+                            f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                     )
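The new check does not merely reject pull-request-commit patch URLs; it computes the stable replacement by switching `/pull/<n>/commits/` to `/commit/` and appending `?full_index=1` when missing. The two substitutions in isolation (the org, repo, and hash below are placeholders):

```python
# The URL rewrite performed by the new pull-request-commit audit.
import re

url = "https://github.com/org/repo/pull/1234/commits/0123abcd.patch"
url = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
print(url)  # https://github.com/org/repo/commit/0123abcd.patch?full_index=1
```

The negative lookbehind keeps the second substitution idempotent: a URL that already ends in `full_index=1` is left alone.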
@@ -23,6 +23,7 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
+from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -898,8 +899,9 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
-            tty.error(f"Error reading specfile: {url}: {e}")
+        except (URLError, web_util.SpackWebError) as url_err:
+            tty.error("Error reading specfile: {0}".format(url))
+            tty.error(url_err)
         return contents

     try:
@@ -2039,17 +2041,21 @@ def try_direct_fetch(spec, mirrors=None):
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
             specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
-                    e1,
+                    "Did not find {0} on {1}".format(
+                        specfile_name, buildcache_fetch_url_signed_json
+                    ),
+                    url_err,
                     level=2,
                 )
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
+                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                    url_err_x,
+                    level=2,
                 )
                 continue
             specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2134,9 +2140,6 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     for mirror in mirror_collection.values():
         fetch_url = mirror.fetch_url
-        # TODO: oci:// does not support signing.
-        if fetch_url.startswith("oci://"):
-            continue
         keys_url = url_util.join(
             fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
         )
@@ -2147,12 +2150,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         try:
             _, _, json_file = web_util.read_from_url(keys_index)
             json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except web_util.SpackWebError as url_err:
+        except (URLError, web_util.SpackWebError) as url_err:
             if web_util.url_exists(keys_index):
+                err_msg = [
+                    "Unable to find public keys in {0},",
+                    " caught exception attempting to read from {1}.",
+                ]
+
                 tty.error(
-                    f"Unable to find public keys in {url_util.format(fetch_url)},"
-                    f" caught exception attempting to read from {url_util.format(keys_index)}."
+                    "".join(err_msg).format(
+                        url_util.format(fetch_url), url_util.format(keys_index)
+                    )
                 )

                 tty.debug(url_err)

                 continue
@@ -2432,7 +2442,7 @@ def get_remote_hash(self):
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+        except urllib.error.URLError:
             return None

         # Validate the hash
@@ -2454,7 +2464,7 @@ def conditional_fetch(self) -> FetchIndexResult:

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

         try:
@@ -2495,7 +2505,10 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
         url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
-        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
+        headers = {
+            "User-Agent": web_util.SPACK_USER_AGENT,
+            "If-None-Match": '"{}"'.format(self.etag),
+        }

         try:
             response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2503,14 +2516,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             if e.getcode() == 304:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
         except ValueError as e:
-            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e

         headers = response.headers
         etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2541,19 +2554,21 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError(
+                "Could not fetch manifest from {}".format(url_manifest), e
+            ) from e

         try:
             manifest = json.loads(response.read())
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

         # Get first blob hash, which should be the index.json
         try:
             index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

         # Fresh?
         if index_digest.digest == self.local_hash:
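`EtagIndexFetcher.conditional_fetch` is a standard HTTP conditional GET on both sides of this diff: send the cached validator in `If-None-Match`, and treat a 304 response as "index unchanged". A hedged standalone sketch using only the standard library (the real code routes through `web_util.urlopen` and Spack's result types):

```python
# Sketch of the ETag conditional-fetch pattern (URL is a placeholder).
import urllib.error
import urllib.request

def fetch_if_changed(url: str, etag: str):
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as e:
        if e.getcode() == 304:
            return None  # not modified: the cached index is still fresh
        raise
    # New content: return the body plus the server's new validator
    return response.read(), response.headers.get("Etag")
```

Skipping the download on 304 is what makes repeated `index.json` refreshes cheap against large build caches.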
@@ -213,15 +213,18 @@ def _root_spec(spec_str: str) -> str:
     Args:
         spec_str: spec to be bootstrapped. Must be without compiler and target.
     """
-    # Add a compiler requirement to the root spec.
+    # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

     if platform == "darwin":
         spec_str += " %apple-clang"
+    elif platform == "windows":
+        spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
+    spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"
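After this hunk the bootstrap root spec pins compiler, platform, and target family together. A sketch of the resulting suffix for an assumed Linux host (the package name `gnupg` is only an example):

```python
# Sketch of what _root_spec now appends (host values are examples).
import archspec.cpu

platform = "linux"  # stands in for str(spack.platforms.host())
compiler = {"darwin": "%apple-clang", "windows": "%msvc",
            "linux": "%gcc", "freebsd": "%clang"}.get(platform, "")
spec_str = f"gnupg {compiler} platform={platform}"
spec_str += f" target={archspec.cpu.host().family}"
print(spec_str)  # e.g. "gnupg %gcc platform=linux target=x86_64"
```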
@@ -597,10 +597,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
         current = copy.copy(entry)
         metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
         metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
-        try:
-            with open(metadata_yaml, encoding="utf-8") as stream:
-                current.update(spack.util.spack_yaml.load(stream))
-            list_of_sources.append(current)
-        except OSError:
-            pass
+        with open(metadata_yaml, encoding="utf-8") as stream:
+            current.update(spack.util.spack_yaml.load(stream))
+        list_of_sources.append(current)
     return list_of_sources
@@ -43,8 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from multiprocessing.connection import Connection
-from typing import Callable, Dict, List, Optional, Set, Tuple
+from typing import Dict, List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -52,6 +51,7 @@
 from llnl.util.lang import dedupe, stable_partition
 from llnl.util.symlink import symlink
 from llnl.util.tty.color import cescape, colorize
+from llnl.util.tty.log import MultiProcessFd

 import spack.build_systems.cmake
 import spack.build_systems.meson
@@ -92,7 +92,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, module, path_from_modules
+from spack.util.module_cmd import load_module, path_from_modules

 #
 # This can be set by the user to globally disable parallel builds.
@@ -191,14 +191,6 @@ def __call__(self, *args, **kwargs):
         return super().__call__(*args, **kwargs)


-def _on_cray():
-    host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("default_os")
-    on_cray = str(host_platform) == "cray"
-    using_cnl = re.match(r"cnl\d+", str(host_os))
-    return on_cray, using_cnl
-
-
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -242,17 +234,6 @@ def clean_environment():
         if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
             env.unset(varname)

-    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
-    # interference with Spack dependencies.
-    # CNL requires these variables to be set (or at least some of them,
-    # depending on the CNL version).
-    on_cray, using_cnl = _on_cray()
-    if on_cray and not using_cnl:
-        env.unset("CRAY_LD_LIBRARY_PATH")
-        for varname in os.environ.keys():
-            if "PKGCONF" in varname:
-                env.unset(varname)
-
     # Unset the following variables because they can affect installation of
     # Autotools and CMake packages.
     build_system_vars = [
@@ -382,11 +363,7 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)

     # Set the target parameters that the compiler will add
-    # Don't set on cray platform because the targeting module handles this
-    if spec.satisfies("platform=cray"):
-        isa_arg = ""
-    else:
-        isa_arg = spec.architecture.target.optimization_flags(compiler)
+    isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -480,12 +457,9 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
     env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

+    # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        # Enable ccache in the compiler wrapper
         env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
-    else:
-        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
-        env.set("CCACHE_DISABLE", "1")

     # Gather information about various types of dependencies
     link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -764,7 +738,9 @@ def get_rpaths(pkg):
     # Second module is our compiler mod name. We use that to get rpaths from
     # module show output.
     if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
+        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
+        if mod_rpath:
+            rpaths.append(mod_rpath)
     return list(dedupe(filter_system_paths(rpaths)))


@@ -834,14 +810,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

-    # kludge to handle cray mpich and libsci being automatically loaded by
-    # PrgEnv modules on cray platform. Module unload does no damage when
-    # unnecessary
-    on_cray, _ = _on_cray()
-    if on_cray and not dirty:
-        for mod in ["cray-mpich", "cray-libsci"]:
-            module("unload", mod)
-
     if target and target.module_name:
         load_module(target.module_name)
@@ -1145,60 +1113,18 @@ def get_cmake_prefix_path(pkg):
|
|||||||
|
|
||||||
|
|
||||||
def _setup_pkg_and_run(
|
def _setup_pkg_and_run(
|
||||||
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||||
function: Callable,
|
|
||||||
kwargs: Dict,
|
|
||||||
write_pipe: Connection,
|
|
||||||
input_pipe: Optional[Connection],
|
|
||||||
jsfd1: Optional[Connection],
|
|
||||||
jsfd2: Optional[Connection],
|
|
||||||
):
|
):
|
||||||
"""Main entry point in the child process for Spack builds.
|
|
||||||
|
|
||||||
``_setup_pkg_and_run`` is called by the child process created in
|
|
||||||
``start_build_process()``, and its main job is to run ``function()`` on behalf of
|
|
||||||
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
|
|
||||||
|
|
||||||
The child process is passed a ``write_pipe``, on which it's expected to send one of
|
|
||||||
the following:
|
|
||||||
|
|
||||||
* ``StopPhase``: error raised by a build process indicating it's stopping at a
|
|
||||||
particular build phase.
|
|
||||||
|
|
||||||
* ``BaseException``: any exception raised by a child build process, which will be
|
|
||||||
wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
|
|
||||||
raised in the parent.
|
|
||||||
|
|
||||||
* The return value of ``function()``, which can be anything (except an exception).
|
|
||||||
This is returned to the caller.
|
|
||||||
|
|
||||||
Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
|
|
||||||
does not close these file descriptors. Some ``multiprocessing`` backends will close
|
|
||||||
them automatically in the child if they are not passed at process creation time.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
serialized_pkg: Spack package install context object (serialized form of the
|
|
||||||
package that we'll build in the child process).
|
|
||||||
function: function to call in the child process; serialized_pkg is passed to
|
|
||||||
this as the first argument.
|
|
||||||
kwargs: additional keyword arguments to pass to ``function()``.
|
|
||||||
write_pipe: multiprocessing ``Connection`` to the parent process, to which the
|
|
||||||
child *must* send a result (or an error) back to parent on.
|
|
||||||
input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
|
|
||||||
jsfd1: gmake Jobserver file descriptor 1.
|
|
||||||
jsfd2: gmake Jobserver file descriptor 2.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
context: str = kwargs.get("context", "build")
|
context: str = kwargs.get("context", "build")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
|
# We are in the child process. Python sets sys.stdin to
|
||||||
# process and its parent from simultaneously reading from the original stdin. But, we
|
# open(os.devnull) to prevent our process and its parent from
|
||||||
# assume that the parent process is not going to read from it till we are done with the
|
# simultaneously reading from the original stdin. But, we assume
|
||||||
# child, so we undo Python's precaution. closefd=False since Connection has ownership.
|
# that the parent process is not going to read from it till we
|
||||||
if input_pipe is not None:
|
# are done with the child, so we undo Python's precaution.
|
||||||
sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
|
if input_multiprocess_fd is not None:
|
||||||
|
sys.stdin = os.fdopen(input_multiprocess_fd.fd)
|
||||||
|
|
||||||
pkg = serialized_pkg.restore()
|
pkg = serialized_pkg.restore()
|
||||||
|
|
||||||
@@ -1214,14 +1140,13 @@ def _setup_pkg_and_run(
|
|||||||
# Do not create a full ChildError from this, it's not an error
|
# Do not create a full ChildError from this, it's not an error
|
||||||
# it's a control statement.
|
# it's a control statement.
|
||||||
write_pipe.send(e)
|
write_pipe.send(e)
|
||||||
except BaseException as e:
|
except BaseException:
|
||||||
# catch ANYTHING that goes wrong in the child process
|
# catch ANYTHING that goes wrong in the child process
|
||||||
|
exc_type, exc, tb = sys.exc_info()
|
||||||
|
|
||||||
# Need to unwind the traceback in the child because traceback
|
# Need to unwind the traceback in the child because traceback
|
||||||
# objects can't be sent to the parent.
|
# objects can't be sent to the parent.
|
||||||
exc_type = type(e)
|
tb_string = traceback.format_exc()
|
||||||
tb = e.__traceback__
|
|
||||||
tb_string = "".join(traceback.format_exception(exc_type, e, tb))
|
|
||||||
|
|
||||||
# build up some context from the offending package so we can
|
# build up some context from the offending package so we can
|
||||||
# show that, too.
|
# show that, too.
|
||||||
@@ -1238,8 +1163,8 @@ def _setup_pkg_and_run(
        elif context == "test":
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

-        error_msg = str(e)
-        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
+        error_msg = str(exc)
+        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to {}. "
@@ -1249,7 +1174,7 @@ def _setup_pkg_and_run(
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
-            error_msg = "{}\n\n{}".format(str(e), error_msg)
+            error_msg = "{}\n\n{}".format(str(exc), error_msg)

        # make a pickleable exception to send to parent.
        msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1267,8 +1192,8 @@ def _setup_pkg_and_run(
    finally:
        write_pipe.close()
-        if input_pipe is not None:
-            input_pipe.close()
+        if input_multiprocess_fd is not None:
+            input_multiprocess_fd.close()


def start_build_process(pkg, function, kwargs):
@@ -1295,9 +1220,23 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

+    This uses `multiprocessing.Process` to create the child process. The
+    mechanism used to create the process differs on different operating
+    systems and for different versions of Python. In some cases "fork"
+    is used (i.e. the "fork" system call) and some cases it starts an
+    entirely new Python interpreter process (in the docs this is referred
+    to as the "spawn" start method). Breaking it down by OS:
+
+    - Linux always uses fork.
+    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
+    - Windows always uses the "spawn" start method.
+
+    For more information on `multiprocessing` child process creation
+    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
-    input_fd = None
+    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
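The docstring added on the right documents standard-library behavior that is easy to verify directly. A small stand-alone sketch (not Spack code):

```python
import multiprocessing


def work():
    print("hello from the child")


if __name__ == "__main__":  # required on "spawn" platforms, where the module is re-imported
    # "fork" on Linux; "spawn" on Windows and on macOS for Python 3.8+
    print(multiprocessing.get_start_method())

    # An explicit context pins one start method regardless of the platform default
    ctx = multiprocessing.get_context("spawn")
    p = ctx.Process(target=work)
    p.start()
    p.join()
```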
@@ -1306,13 +1245,14 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
-            input_fd = Connection(os.dup(sys.stdin.fileno()))
+            input_fd = os.dup(sys.stdin.fileno())
+            input_multiprocess_fd = MultiProcessFd(input_fd)
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
-                jobserver_fd1 = Connection(int(m.group(1)))
-                jobserver_fd2 = Connection(int(m.group(2)))
+                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
+                jobserver_fd2 = MultiProcessFd(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
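For context on the jobserver lines above: when gmake runs with `-jN` it advertises its jobserver pipe in `MAKEFLAGS`, and the regex extracts the two file descriptor numbers. A sketch with an assumed example value:

```python
import re

# Assumed example: gmake may advertise the jobserver as "--jobserver-auth=3,4"
# (older versions spell it "--jobserver-fds=3,4"); both match the pattern below.
mflags = "-j8 --jobserver-auth=3,4"

m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
    fd1, fd2 = int(m.group(1)), int(m.group(2))  # read and write ends of the pipe
    print(fd1, fd2)  # 3 4
```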
@@ -1321,7 +1261,7 @@ def child_fun():
            function,
            kwargs,
            write_pipe,
-            input_fd,
+            input_multiprocess_fd,
            jobserver_fd1,
            jobserver_fd2,
        ),
@@ -1341,8 +1281,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
-        if input_fd is not None:
-            input_fd.close()
+        if input_multiprocess_fd is not None:
+            input_multiprocess_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
@@ -162,7 +162,9 @@ def initconfig_compiler_entries(self):
            ld_flags = " ".join(flags["ldflags"])
            ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
            # CMake has separate linker arguments for types of builds.
-            for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
+            # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
+            # is used by the archiver, so don't include "STATIC" in this loop:
+            for ld_type in ["EXE", "MODULE", "SHARED"]:
                ld_string = ld_format_string.format(ld_type)
                entries.append(cmake_cache_string(ld_string, ld_flags))
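The reasoning behind the new comment: `ldflags` are linker options, while `CMAKE_STATIC_LINKER_FLAGS` is handed to the archiver, which does not understand them. A toy sketch of what the loop emits, assuming `cmake_cache_string()` renders a CMake `set(... CACHE STRING ...)` line (the helper below is a stand-in, not Spack's implementation):

```python
def cmake_cache_string(name, value, comment=""):
    # stand-in for the real helper: emit a CMake cache entry
    return 'set({0} "{1}" CACHE STRING "{2}")'.format(name, value, comment)


ld_flags = "-L/opt/mylib/lib -lmylib"  # hypothetical ldflags
for ld_type in ["EXE", "MODULE", "SHARED"]:  # "STATIC" intentionally excluded
    print(cmake_cache_string("CMAKE_{0}_LINKER_FLAGS".format(ld_type), ld_flags))
# set(CMAKE_EXE_LINKER_FLAGS "-L/opt/mylib/lib -lmylib" CACHE STRING "")
# ...
```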
@@ -110,9 +110,8 @@ def cuda_flags(arch_list):
    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
-    # platform=linux, since they should also apply to platform=cray, and may
-    # apply to platform=darwin. We currently do not provide conflicts for
-    # platform=darwin with %apple-clang.
+    # platform=linux, since they may apply to platform=darwin. We currently
+    # do not provide conflicts for platform=darwin with %apple-clang.

    # Linux x86_64 compiler conflicts from here:
    # https://gist.github.com/ax3l/9489132
@@ -137,11 +136,14 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
+    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
+    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
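How to read the directives above: each `conflicts()` pairs a compiler constraint with a `when=` spec, and concretization rejects any spec matching both. The new gcc line, for instance, means a spec with `+cuda` and a `cuda@:12.5` dependency cannot be compiled with gcc 14 or newer. A sketch of the same directive in a hypothetical recipe:

```python
# Hypothetical package recipe using the directive shown above
from spack.package import *


class Mycode(CMakePackage, CudaPackage):
    """Toy package illustrating a compiler/CUDA conflict."""

    # Reject any concretization pairing gcc@14: with cuda@:12.5 when CUDA
    # support is enabled; newer CUDA releases lift the restriction.
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
```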
@@ -846,6 +846,7 @@ def scalapack_libs(self):
            "^mpich@2:" in spec_root
            or "^cray-mpich" in spec_root
            or "^mvapich2" in spec_root
+            or "^mvapich" in spec_root
            or "^intel-mpi" in spec_root
            or "^intel-oneapi-mpi" in spec_root
            or "^intel-parallel-studio" in spec_root
@@ -936,32 +937,15 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
            "I_MPI_ROOT": self.normalize_path("mpi"),
        }

-        # CAUTION - SIMILAR code in:
-        #     var/spack/repos/builtin/packages/mpich/package.py
-        #     var/spack/repos/builtin/packages/openmpi/package.py
-        #     var/spack/repos/builtin/packages/mvapich2/package.py
-        #
-        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
-        if "platform=cray" in self.spec:
-            # TODO: Confirm
-            wrapper_vars.update(
-                {
-                    "MPICC": compilers_of_client["CC"],
-                    "MPICXX": compilers_of_client["CXX"],
-                    "MPIF77": compilers_of_client["F77"],
-                    "MPIF90": compilers_of_client["F90"],
-                }
-            )
-        else:
-            compiler_wrapper_commands = self.mpi_compiler_wrappers
-            wrapper_vars.update(
-                {
-                    "MPICC": compiler_wrapper_commands["MPICC"],
-                    "MPICXX": compiler_wrapper_commands["MPICXX"],
-                    "MPIF77": compiler_wrapper_commands["MPIF77"],
-                    "MPIF90": compiler_wrapper_commands["MPIF90"],
-                }
-            )
+        compiler_wrapper_commands = self.mpi_compiler_wrappers
+        wrapper_vars.update(
+            {
+                "MPICC": compiler_wrapper_commands["MPICC"],
+                "MPICXX": compiler_wrapper_commands["MPICXX"],
+                "MPIF77": compiler_wrapper_commands["MPIF77"],
+                "MPIF90": compiler_wrapper_commands["MPIF90"],
+            }
+        )

        # Ensure that the directory containing the compiler wrappers is in the
        # PATH. Spack packages add `prefix.bin` to their dependents' paths,
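After the Cray branch is dropped, the wrapper variables always come from the package's own MPI compiler wrappers. A toy illustration of the resulting mapping (paths invented for the example):

```python
# Invented paths; the shape of the data is what matters
compiler_wrapper_commands = {
    "MPICC": "/opt/intel/mpi/bin/mpicc",
    "MPICXX": "/opt/intel/mpi/bin/mpicxx",
    "MPIF77": "/opt/intel/mpi/bin/mpif77",
    "MPIF90": "/opt/intel/mpi/bin/mpif90",
}

wrapper_vars = {"I_MPI_ROOT": "/opt/intel/mpi"}
wrapper_vars.update(
    {k: compiler_wrapper_commands[k] for k in ("MPICC", "MPICXX", "MPIF77", "MPIF90")}
)
# Dependent builds then see MPICC, MPICXX, MPIF77 and MPIF90 in their environment.
```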
@@ -24,7 +24,6 @@ class MSBuildPackage(spack.package_base.PackageBase):
    build_system("msbuild")
    conflicts("platform=linux", when="build_system=msbuild")
    conflicts("platform=darwin", when="build_system=msbuild")
-    conflicts("platform=cray", when="build_system=msbuild")


@spack.builder.builder("msbuild")
@@ -24,7 +24,6 @@ class NMakePackage(spack.package_base.PackageBase):
    build_system("nmake")
    conflicts("platform=linux", when="build_system=nmake")
    conflicts("platform=darwin", when="build_system=nmake")
-    conflicts("platform=cray", when="build_system=nmake")


@spack.builder.builder("nmake")
@@ -36,9 +36,8 @@ class IntelOneApiPackage(Package):
        "target=ppc64:",
        "target=ppc64le:",
        "target=aarch64:",
-        "platform=darwin:",
-        "platform=cray:",
-        "platform=windows:",
+        "platform=darwin",
+        "platform=windows",
    ]:
        conflicts(c, msg="This package in only available for x86_64 and Linux")
@@ -553,10 +553,9 @@ def generate_gitlab_ci_yaml(
    env,
    print_summary,
    output_file,
+    *,
    prune_dag=False,
    check_index_only=False,
-    run_optimizer=False,
-    use_dependencies=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
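The bare `*` added to the signature makes every parameter after it keyword-only, so callers cannot silently pass the flags positionally. A quick stand-alone illustration:

```python
def generate(env, output_file, *, prune_dag=False, artifacts_root=None):
    return (env, output_file, prune_dag, artifacts_root)


generate("env", "out.yaml", prune_dag=True)  # OK
# generate("env", "out.yaml", True)          # TypeError: takes 2 positional arguments
```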
@@ -577,12 +576,6 @@ def generate_gitlab_ci_yaml(
            this mode results in faster yaml generation time). Otherwise, also
            check each spec directly by url (useful if there is no index or it
            might be out of date).
-        run_optimizer (bool): If True, post-process the generated yaml to try
-            try to reduce the size (attempts to collect repeated configuration
-            and replace with definitions).)
-        use_dependencies (bool): If true, use "dependencies" rather than "needs"
-            ("needs" allows DAG scheduling). Useful if gitlab instance cannot
-            be configured to handle more than a few "needs" per job.
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
@@ -1113,7 +1106,7 @@ def main_script_replacements(cmd):
        if cdash_handler and cdash_handler.auth_token:
            try:
                cdash_handler.populate_buildgroup(all_job_names)
-            except (SpackError, HTTPError, URLError, TimeoutError) as err:
+            except (SpackError, HTTPError, URLError) as err:
                tty.warn(f"Problem populating buildgroup: {err}")
        else:
            tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1273,17 +1266,6 @@ def main_script_replacements(cmd):
            with open(copy_specs_file, "w") as fd:
                fd.write(json.dumps(buildcache_copies))

-        # TODO(opadron): remove this or refactor
-        if run_optimizer:
-            import spack.ci_optimization as ci_opt
-
-            output_object = ci_opt.optimizer(output_object)
-
-        # TODO(opadron): remove this or refactor
-        if use_dependencies:
-            import spack.ci_needs_workaround as cinw
-
-            output_object = cinw.needs_to_dependencies(output_object)
    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -2100,7 +2082,7 @@ def read_broken_spec(broken_spec_url):
    """
    try:
        _, _, fs = web_util.read_from_url(broken_spec_url)
-    except web_util.SpackWebError:
+    except (URLError, web_util.SpackWebError, HTTPError):
        tty.warn(f"Unable to read broken spec from {broken_spec_url}")
        return None
@@ -1,34 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections.abc
-
-get_job_name = lambda needs_entry: (
-    needs_entry.get("job")
-    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
-    else needs_entry if isinstance(needs_entry, str) else None
-)
-
-
-def convert_job(job_entry):
-    if not isinstance(job_entry, collections.abc.Mapping):
-        return job_entry
-
-    needs = job_entry.get("needs")
-    if needs is None:
-        return job_entry
-
-    new_job = {}
-    new_job.update(job_entry)
-    del new_job["needs"]
-
-    new_job["dependencies"] = list(
-        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
-    )
-
-    return new_job
-
-
-def needs_to_dependencies(yaml):
-    return dict((k, convert_job(v)) for k, v in yaml.items())
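For the record, here is what the deleted helper did, on a toy GitLab job mapping: `needs` entries (which enable DAG scheduling) were rewritten to the older `dependencies` key for GitLab instances that could not handle many `needs` per job.

```python
# Toy input: one job with two "needs" entries
job = {"stage": "build", "needs": [{"job": "gcc-build", "artifacts": True}, "setup"]}

# needs_to_dependencies({"my-job": job}) would have produced:
converted = {"my-job": {"stage": "build", "dependencies": ["gcc-build", "setup"]}}
```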
@@ -1,363 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections
-import collections.abc
-import copy
-import hashlib
-
-import spack.util.spack_yaml as syaml
-
-
-def sort_yaml_obj(obj):
-    if isinstance(obj, collections.abc.Mapping):
-        return syaml.syaml_dict(
-            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
-        )
-
-    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
-        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
-
-    return obj
-
-
-def matches(obj, proto):
-    """Returns True if the test object "obj" matches the prototype object
-    "proto".
-
-    If obj and proto are mappings, obj matches proto if (key in obj) and
-    (obj[key] matches proto[key]) for every key in proto.
-
-    If obj and proto are sequences, obj matches proto if they are of the same
-    length and (a matches b) for every (a,b) in zip(obj, proto).
-
-    Otherwise, obj matches proto if obj == proto.
-
-    Precondition: proto must not have any reference cycles
-    """
-    if isinstance(obj, collections.abc.Mapping):
-        if not isinstance(proto, collections.abc.Mapping):
-            return False
-
-        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
-
-    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
-        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
-            return False
-
-        if len(obj) != len(proto):
-            return False
-
-        return all(matches(obj[index], val) for index, val in enumerate(proto))
-
-    return obj == proto
-
-
-def subkeys(obj, proto):
-    """Returns the test mapping "obj" after factoring out the items it has in
-    common with the prototype mapping "proto".
-
-    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
-    returns a mapping, m, whose keys are the union of the keys of a and b, and
-    for every such key, "k", its corresponding value is:
-
-      - merge(a[key], b[key])  if a[key] and b[key] are mappings, or
-      - b[key]                 if (key in b) and not matches(a[key], b[key]), or
-      - a[key]                 otherwise
-
-
-    If obj and proto are mappings, the returned object is the smallest object,
-    "a", such that merge(a, proto) matches obj.
-
-    Otherwise, obj is returned.
-    """
-    if not (
-        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
-    ):
-        return obj
-
-    new_obj = {}
-    for key, value in obj.items():
-        if key not in proto:
-            new_obj[key] = value
-            continue
-
-        if matches(value, proto[key]) and matches(proto[key], value):
-            continue
-
-        if isinstance(value, collections.abc.Mapping):
-            new_obj[key] = subkeys(value, proto[key])
-            continue
-
-        new_obj[key] = value
-
-    return new_obj
-
-
-def add_extends(yaml, key):
-    """Modifies the given object "yaml" so that it includes an "extends" key
-    whose value features "key".
-
-    If "extends" is not in yaml, then yaml is modified such that
-    yaml["extends"] == key.
-
-    If yaml["extends"] is a str, then yaml is modified such that
-    yaml["extends"] == [yaml["extends"], key]
-
-    If yaml["extends"] is a list that does not include key, then key is
-    appended to the list.
-
-    Otherwise, yaml is left unchanged.
-    """
-
-    has_key = "extends" in yaml
-    extends = yaml.get("extends")
-
-    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
-        return
-
-    if extends is None:
-        yaml["extends"] = key
-        return
-
-    if isinstance(extends, str):
-        if extends != key:
-            yaml["extends"] = [extends, key]
-        return
-
-    if key not in extends:
-        extends.append(key)
-
-
-def common_subobject(yaml, sub):
-    """Factor prototype object "sub" out of the values of mapping "yaml".
-
-    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
-
-      - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
-      - Otherwise, new[key] = yaml[key].
-
-    If the above match criteria is not satisfied for any such key, then (yaml,
-    None) is returned. The yaml object is returned unchanged.
-
-    Otherwise, each matching value in new is modified as in
-    add_extends(new[key], common_key), and then new[common_key] is set to sub.
-    The common_key value is chosen such that it does not match any preexisting
-    key in new. In this case, (new, common_key) is returned.
-    """
-    match_list = set(k for k, v in yaml.items() if matches(v, sub))
-
-    if not match_list:
-        return yaml, None
-
-    common_prefix = ".c"
-    common_index = 0
-
-    while True:
-        common_key = "".join((common_prefix, str(common_index)))
-        if common_key not in yaml:
-            break
-        common_index += 1
-
-    new_yaml = {}
-
-    for key, val in yaml.items():
-        new_yaml[key] = copy.deepcopy(val)
-
-        if not matches(val, sub):
-            continue
-
-        new_yaml[key] = subkeys(new_yaml[key], sub)
-        add_extends(new_yaml[key], common_key)
-
-    new_yaml[common_key] = sub
-
-    return new_yaml, common_key
-
-
-def print_delta(name, old, new, applied=None):
-    delta = new - old
-    reldelta = (1000 * delta) // old
-    reldelta = (reldelta // 10, reldelta % 10)
-
-    if applied is None:
-        applied = new <= old
-
-    print(
-        "\n".join(
-            (
-                "{0} {1}:",
-                "  before: {2: 10d}",
-                "  after : {3: 10d}",
-                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
-            )
-        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
-    )
-
-
-def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
-    """Try applying an optimization pass and return information about the
-    result
-
-    "name" is a string describing the nature of the pass. If it is a non-empty
-    string, summary statistics are also printed to stdout.
-
-    "yaml" is the object to apply the pass to.
-
-    "optimization_pass" is the function implementing the pass to be applied.
-
-    "args" and "kwargs" are the additional arguments to pass to optimization
-    pass. The pass is applied as
-
-    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
-
-    The pass's results are greedily rejected if it does not modify the original
-    yaml document, or if it produces a yaml document that serializes to a
-    larger string.
-
-    Returns (new_yaml, yaml, applied, other_results) if applied, or
-    (yaml, new_yaml, applied, other_results) otherwise.
-    """
-    result = optimization_pass(yaml, *args, **kwargs)
-    new_yaml, other_results = result[0], result[1:]
-
-    if new_yaml is yaml:
-        # pass was not applied
-        return (yaml, new_yaml, False, other_results)
-
-    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
-
-    # pass makes the size worse: not applying
-    applied = post_size <= pre_size
-    if applied:
-        yaml, new_yaml = new_yaml, yaml
-
-    if name:
-        print_delta(name, pre_size, post_size, applied)
-
-    return (yaml, new_yaml, applied, other_results)
-
-
-def build_histogram(iterator, key):
-    """Builds a histogram of values given an iterable of mappings and a key.
-
-    For each mapping "m" with key "key" in iterator, the value m[key] is
-    considered.
-
-    Returns a list of tuples (hash, count, proportion, value), where
-
-      - "hash" is a sha1sum hash of the value.
-      - "count" is the number of occurences of values that hash to "hash".
-      - "proportion" is the proportion of all values considered above that
-        hash to "hash".
-      - "value" is one of the values considered above that hash to "hash".
-        Which value is chosen when multiple values hash to the same "hash" is
-        undefined.
-
-    The list is sorted in descending order by count, yielding the most
-    frequently occuring hashes first.
-    """
-    buckets = collections.defaultdict(int)
-    values = {}
-
-    num_objects = 0
-    for obj in iterator:
-        num_objects += 1
-
-        try:
-            val = obj[key]
-        except (KeyError, TypeError):
-            continue
-
-        value_hash = hashlib.sha1()
-        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
-        value_hash = value_hash.hexdigest()
-
-        buckets[value_hash] += 1
-        values[value_hash] = val
-
-    return [
-        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
-        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
-    ]
-
-
-def optimizer(yaml):
-    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-
-    # try factoring out commonly repeated portions
-    common_job = {
-        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
-        "after_script": ['rm -rf "./spack"'],
-        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
-    }
-
-    # look for a list of tags that appear frequently
-    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
-
-    # If a list of tags is found, and there are more than one job that uses it,
-    # *and* the jobs that do use it represent at least 70% of all jobs, then
-    # add the list to the prototype object.
-    if tags and count > 1 and proportion >= 0.70:
-        common_job["tags"] = tags
-
-    # apply common object factorization
-    yaml, other, applied, rest = try_optimization_pass(
-        "general common object factorization", yaml, common_subobject, common_job
-    )
-
-    # look for a common script, and try factoring that out
-    _, count, proportion, script = next(
-        iter(build_histogram(yaml.values(), "script")), (None,) * 4
-    )
-
-    if script and count > 1 and proportion >= 0.70:
-        yaml, other, applied, rest = try_optimization_pass(
-            "script factorization", yaml, common_subobject, {"script": script}
-        )
-
-    # look for a common before_script, and try factoring that out
-    _, count, proportion, script = next(
-        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
-    )
-
-    if script and count > 1 and proportion >= 0.70:
-        yaml, other, applied, rest = try_optimization_pass(
-            "before_script factorization", yaml, common_subobject, {"before_script": script}
-        )
-
-    # Look specifically for the SPACK_ROOT_SPEC environment variables.
-    # Try to factor them out.
-    h = build_histogram(
-        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
-        "SPACK_ROOT_SPEC",
-    )
-
-    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
-    # environment variable; not just the one that appears with the greatest
-    # frequency. We only require that more than 1 job uses a given instance's
-    # value, because we expect the value to be very large, and so expect even
-    # few-to-one factorizations to yield large space savings.
-    counter = 0
-    for _, count, proportion, spec in h:
-        if count <= 1:
-            continue
-
-        counter += 1
-
-        yaml, other, applied, rest = try_optimization_pass(
-            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
-            yaml,
-            common_subobject,
-            {"variables": {"SPACK_ROOT_SPEC": spec}},
-        )
-
-    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-
-    print("\n")
-    print_delta("overall summary", original_size, new_size)
-    print("\n")
-    return yaml
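A worked example of the factoring the deleted optimizer performed, with toy data: `common_subobject()` pulled a shared prototype out into a hidden anchor job and pointed matching jobs at it via GitLab's `extends`.

```python
before = {
    "job1": {"tags": ["x86_64"], "script": ["spack ci rebuild"]},
    "job2": {"tags": ["x86_64"], "script": ["spack ci rebuild"]},
}

# common_subobject(before, {"script": ["spack ci rebuild"]}) returned:
after = {
    "job1": {"tags": ["x86_64"], "extends": ".c0"},
    "job2": {"tags": ["x86_64"], "extends": ".c0"},
    ".c0": {"script": ["spack ci rebuild"]},
}
# Each pass was kept only if the YAML serialization actually got smaller.
```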
@@ -444,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
    return [x for x in specs if x.dag_hash() in hashes]
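The separator change matters for portability: the loaded-hashes variable is a PATH-like list, and `os.pathsep` is `:` on POSIX but `;` on Windows. Illustrative sketch (variable name invented for the example):

```python
import os

os.environ["MY_LOADED_HASHES"] = os.pathsep.join(["abc123", "def456"])
hashes = os.environ.get("MY_LOADED_HASHES", "").split(os.pathsep)
print(hashes)  # ['abc123', 'def456'] on every platform
```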
@@ -813,7 +813,7 @@ def _push_oci(
    def extra_config(spec: Spec):
        spec_dict = spec.to_dict(hash=ht.dag_hash)
-        spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+        spec_dict["buildcache_layout_version"] = 1
        spec_dict["binary_cache_checksum"] = {
            "hash_algorithm": "sha256",
            "hash": checksums[spec.dag_hash()].compressed_digest.digest,
@@ -6,6 +6,7 @@
import json
import os
import shutil
+import warnings
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
@@ -73,7 +74,7 @@ def setup_parser(subparser):
        "--optimize",
        action="store_true",
        default=False,
-        help="(experimental) optimize the gitlab yaml file for size\n\n"
+        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
        "run the generated document through a series of optimization passes "
        "designed to reduce the size of the generated file",
    )
@@ -81,7 +82,7 @@ def setup_parser(subparser):
        "--dependencies",
        action="store_true",
        default=False,
-        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
+        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
    )
    generate.add_argument(
        "--buildcache-destination",
@@ -200,6 +201,18 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
+    if args.optimize:
+        warnings.warn(
+            "The --optimize option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
+    if args.dependencies:
+        warnings.warn(
+            "The --dependencies option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:
@@ -212,8 +225,6 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
-    run_optimizer = args.optimize
-    use_dependencies = args.dependencies
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root
@@ -234,8 +245,6 @@ def ci_generate(args):
        output_file,
        prune_dag=prune_dag,
        check_index_only=index_only,
-        run_optimizer=run_optimizer,
-        use_dependencies=use_dependencies,
        artifacts_root=artifacts_root,
        remote_mirror_override=buildcache_destination,
    )
@@ -106,7 +106,8 @@ def clean(parser, args):

    # Then do the cleaning falling through the cases
    if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
        for spec in specs:
            msg = "Cleaning build stage [{0}]"
            tty.msg(msg.format(spec.short_spec))
@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

+import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
            prepend_header(args, f)
            formatter(args, f)

+        if args.update_completion:
+            fs.set_executable(args.update)
+
    else:
        prepend_header(args, sys.stdout)
        formatter(args, sys.stdout)
@@ -661,32 +661,34 @@ def mirror_name_or_url(m):
    # accidentally to a dir in the current working directory.

    # If there's a \ or / in the name, it's interpreted as a path or url.
-    if "/" in m or "\\" in m or m in (".", ".."):
+    if "/" in m or "\\" in m:
        return spack.mirror.Mirror(m)

    # Otherwise, the named mirror is required to exist.
    try:
        return spack.mirror.require_mirror_name(m)
    except ValueError as e:
-        raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
+        raise argparse.ArgumentTypeError(
+            str(e) + ". Did you mean {}?".format(os.path.join(".", m))
+        )


def mirror_url(url):
    try:
        return spack.mirror.Mirror.from_url(url)
    except ValueError as e:
-        raise argparse.ArgumentTypeError(str(e)) from e
+        raise argparse.ArgumentTypeError(str(e))


def mirror_directory(path):
    try:
        return spack.mirror.Mirror.from_local_path(path)
    except ValueError as e:
-        raise argparse.ArgumentTypeError(str(e)) from e
+        raise argparse.ArgumentTypeError(str(e))


def mirror_name(name):
    try:
        return spack.mirror.require_mirror_name(name)
    except ValueError as e:
-        raise argparse.ArgumentTypeError(str(e)) from e
+        raise argparse.ArgumentTypeError(str(e))
@@ -3,6 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import llnl.util.tty as tty
+from llnl.string import plural
+
import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -43,5 +46,9 @@ def concretize(parser, args):
    with env.write_transaction():
        concretized_specs = env.concretize(force=args.force, tests=tests)
        if not args.quiet:
-            ev.display_specs(concretized_specs)
+            if concretized_specs:
+                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
+                ev.display_specs([concrete for _, concrete in concretized_specs])
+            else:
+                tty.msg("No new specs to concretize.")
        env.write()
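Behavior assumed from the call site above: `llnl.string.plural` formats a count with a correctly pluralized noun, so the message reads naturally for one or many specs. Equivalent logic, spelled out:

```python
# Assumed semantics, inferred from the call site:
#   plural(1, "spec") -> "1 spec"
#   plural(3, "spec") -> "3 specs"
count = 3
noun = "spec" if count == 1 else "specs"
print(f"Concretized {count} {noun}:")  # "Concretized 3 specs:"
```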
@@ -9,6 +9,8 @@

import spack.cmd
import spack.config
+import spack.fetch_strategy
+import spack.repo
import spack.spec
import spack.util.path
import spack.version
@@ -69,13 +71,15 @@ def _retrieve_develop_source(spec, abspath):
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
-    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
-        package.stage[0].fetcher.get_full_repo = True
+    source_stage = package.stage[0]
+    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
+        source_stage.fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
-        package.stage[0].disable_mirrors()
+        source_stage.disable_mirrors()

+    source_stage.fetcher.set_package(package)
    package.stage.steal_source(abspath)
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import errno
import glob
import os
@@ -11,43 +12,13 @@
import spack.cmd
import spack.paths
import spack.repo
-from spack.spec import Spec
-from spack.util.editor import editor
+import spack.util.editor

description = "open package files in $EDITOR"
section = "packaging"
level = "short"


-def edit_package(name, repo_path, namespace):
-    """Opens the requested package file in your favorite $EDITOR.
-
-    Args:
-        name (str): The name of the package
-        repo_path (str): The path to the repository containing this package
-        namespace (str): A valid namespace registered with Spack
-    """
-    # Find the location of the package
-    if repo_path:
-        repo = spack.repo.Repo(repo_path)
-    elif namespace:
-        repo = spack.repo.PATH.get_repo(namespace)
-    else:
-        repo = spack.repo.PATH
-    path = repo.filename_for_package_name(name)
-
-    spec = Spec(name)
-    if os.path.exists(path):
-        if not os.path.isfile(path):
-            tty.die("Something is wrong. '{0}' is not a file!".format(path))
-        if not os.access(path, os.R_OK):
-            tty.die("Insufficient permissions on '%s'!" % path)
-    else:
-        raise spack.repo.UnknownPackageError(spec.name)
-
-    editor(path)
-
-
def setup_parser(subparser):
    excl_args = subparser.add_mutually_exclusive_group()
@@ -98,41 +69,67 @@ def setup_parser(subparser):
    excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
    excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

-    subparser.add_argument("package", nargs="?", default=None, help="package name")
+    subparser.add_argument("package", nargs="*", default=None, help="package name")


+def locate_package(name: str, repo: spack.repo.Repo) -> str:
+    path = repo.filename_for_package_name(name)
+
+    try:
+        with open(path, "r"):
+            return path
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            raise spack.repo.UnknownPackageError(name) from e
+        tty.die(f"Cannot edit package: {e}")
+
+
+def locate_file(name: str, path: str) -> str:
+    # convert command names to python module name
+    if path == spack.paths.command_path:
+        name = spack.cmd.python_name(name)
+
+    file_path = os.path.join(path, name)
+
+    # Try to open direct match.
+    try:
+        with open(file_path, "r"):
+            return file_path
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            tty.die(f"Cannot edit file: {e}")
+        pass
+
+    # Otherwise try to find a file that starts with the name
+    candidates = glob.glob(file_path + "*")
+    exclude_list = [".pyc", "~"]  # exclude binaries and backups
+    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
+    if len(files) > 1:
+        tty.die(
+            f"Multiple files start with `{name}`:\n"
+            + "\n".join(f"  {os.path.basename(f)}" for f in files)
+        )
+    elif not files:
+        tty.die(f"No file for '{name}' was found in {path}")
+    return files[0]
+
+
def edit(parser, args):
-    name = args.package
+    names = args.package

-    # By default, edit package files
-    path = spack.paths.packages_path
-
    # If `--command`, `--test`, or `--module` is chosen, edit those instead
    if args.path:
-        path = args.path
-        if name:
-            # convert command names to python module name
-            if path == spack.paths.command_path:
-                name = spack.cmd.python_name(name)
-
-            path = os.path.join(path, name)
-            if not os.path.exists(path):
-                files = glob.glob(path + "*")
-                exclude_list = [".pyc", "~"]  # exclude binaries and backups
-                files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
-                if len(files) > 1:
-                    m = "Multiple files exist with the name {0}.".format(name)
-                    m += " Please specify a suffix. Files are:\n\n"
-                    for f in files:
-                        m += "  " + os.path.basename(f) + "\n"
-                    tty.die(m)
-                if not files:
-                    tty.die("No file for '{0}' was found in {1}".format(name, path))
-                path = files[0]  # already confirmed only one entry in files
-
-        editor(path)
-    elif name:
-        edit_package(name, args.repo, args.namespace)
+        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
+        spack.util.editor.editor(*paths)
+    elif names:
+        if args.repo:
+            repo = spack.repo.Repo(args.repo)
+        elif args.namespace:
+            repo = spack.repo.PATH.get_repo(args.namespace)
+        else:
+            repo = spack.repo.PATH
+        paths = [locate_package(name, repo) for name in names]
+        spack.util.editor.editor(*paths)
    else:
        # By default open the directory where packages live
-        editor(path)
+        spack.util.editor.editor(spack.paths.packages_path)
@@ -468,30 +468,32 @@ def env_remove(args):
    This removes an environment managed by Spack. Directory environments
    and manifests embedded in repositories should be removed manually.
    """
-    remove_envs = []
+    read_envs = []
    valid_envs = []
    bad_envs = []
+    invalid_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
-            valid_envs.append(env)
+            valid_envs.append(env_name)

            if env_name in args.rm_env:
-                remove_envs.append(env)
+                read_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
+            invalid_envs.append(env_name)
+
            if env_name in args.rm_env:
                bad_envs.append(env_name)

-    # Check if remove_env is included from another env before trying to remove
-    for env in valid_envs:
-        for remove_env in remove_envs:
+    # Check if env is linked to another before trying to remove
+    for name in valid_envs:
        # don't check if environment is included to itself
-        if env.name == remove_env.name:
+        if name == env_name:
            continue
+        environ = ev.Environment(ev.root(name))
-        if remove_env.path in env.included_concrete_envs:
-            msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
+        if ev.root(env_name) in environ.included_concrete_envs:
+            msg = f'Environment "{env_name}" is being used by environment "{name}"'
        if args.force:
            tty.warn(msg)
        else:
@@ -504,7 +506,7 @@ def env_remove(args):
    if not answer:
        tty.die("Will not remove any environments")

-    for env in remove_envs:
+    for env in read_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")
@@ -169,9 +169,9 @@ def query_arguments(args):
    if (args.missing or args.only_missing) and not args.only_deprecated:
        installed.append(InstallStatuses.MISSING)

-    predicate_fn = None
+    known = any
    if args.unknown:
-        predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)
+        known = False

    explicit = any
    if args.explicit:
@@ -179,7 +179,7 @@ def query_arguments(args):
    if args.implicit:
        explicit = False

-    q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}
+    q_args = {"installed": installed, "known": known, "explicit": explicit}

    install_tree = args.install_tree
    upstreams = spack.config.get("upstreams", {})
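The `predicate_fn` side of this hunk generalizes the query: any callable over database records can filter results, instead of the special-cased `known` flag. A toy model of the idea (records and fields invented; the real query API lives in Spack's database layer):

```python
class Record:
    def __init__(self, name, known):
        self.name = name
        self.known = known  # stand-in for "package still exists in a repo"


records = [Record("zlib", True), Record("ancient-pkg", False)]

# What `spack find --unknown` asks for: records whose package no longer exists
predicate_fn = lambda rec: not rec.known
print([r.name for r in records if predicate_fn(r)])  # ['ancient-pkg']
```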
@@ -50,7 +50,7 @@
    @B{++}, @r{--}, @r{~~}, @B{==} propagate variants to package dependencies

    architecture variants:
-    @m{platform=platform} linux, darwin, cray, etc.
+    @m{platform=platform} linux, darwin, freebsd, windows
    @m{os=operating_system} specific <operating_system>
    @m{target=target} specific <target> processor
    @m{arch=platform-os-target} shortcut for all three above
@@ -10,6 +10,7 @@
from typing import List

import llnl.util.filesystem as fs
+from llnl.string import plural
from llnl.util import lang, tty

import spack.build_environment
@@ -375,7 +376,9 @@ def _maybe_add_and_concretize(args, env, specs):
        # `spack concretize`
        tests = compute_tests_install_kwargs(env.user_specs, args.test)
        concretized_specs = env.concretize(tests=tests)
-        ev.display_specs(concretized_specs)
+        if concretized_specs:
+            tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
+            ev.display_specs([concrete for _, concrete in concretized_specs])

        # save view regeneration for later, so that we only do it
        # once, as it can be slow.
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import datetime
import os
import re
from collections import defaultdict
@@ -96,7 +97,7 @@ def list_files(args):
OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)

#: Latest year that copyright applies. UPDATE THIS when bumping copyright.
-latest_year = 2024  # year of 0.22 release
+latest_year = datetime.date.today().year
strict_date = r"Copyright 2013-%s" % latest_year

#: regexes for valid license lines at tops of files
@@ -101,9 +101,8 @@ def do_mark(specs, explicit):
        specs (list): list of specs to be marked
        explicit (bool): whether to mark specs as explicitly installed
    """
-    with spack.store.STORE.db.write_transaction():
-        for spec in specs:
-            spack.store.STORE.db.mark(spec, "explicit", explicit)
+    for spec in specs:
+        spack.store.STORE.db.update_explicit(spec, explicit)


def mark_specs(args, specs):
@@ -377,10 +377,7 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {
-        "refresh": {
-            "installed": True,
-            "predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
-        }
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
    }
    query_args = constraint_qualifiers.get(args.subparser_name, {})
@@ -114,15 +114,16 @@ def _process_result(result, show, required_format, kwargs):
|
|||||||
|
|
||||||
# dump the solutions as concretized specs
|
# dump the solutions as concretized specs
|
||||||
if "solutions" in show:
|
if "solutions" in show:
|
||||||
for spec in result.specs:
|
if required_format:
|
||||||
# With -y, just print YAML to output.
|
for spec in result.specs:
|
||||||
if required_format == "yaml":
|
# With -y, just print YAML to output.
|
||||||
# use write because to_yaml already has a newline.
|
if required_format == "yaml":
|
||||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
# use write because to_yaml already has a newline.
|
||||||
elif required_format == "json":
|
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||||
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
|
elif required_format == "json":
|
||||||
else:
|
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
|
||||||
sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
|
else:
|
||||||
|
sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
|
||||||
print()
|
print()
|
||||||
|
|
||||||
if result.unsolved_specs and "solutions" in show:
|
if result.unsolved_specs and "solutions" in show:
|
||||||
|
|||||||
@@ -105,11 +105,19 @@ def spec(parser, args):
|
|||||||
if env:
|
if env:
|
||||||
env.concretize()
|
env.concretize()
|
||||||
specs = env.concretized_specs()
|
specs = env.concretized_specs()
|
||||||
|
|
||||||
|
# environments are printed together in a combined tree() invocation,
|
||||||
|
# except when using --yaml or --json, which we print spec by spec below.
|
||||||
|
if not args.format:
|
||||||
|
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||||
|
tree_kwargs["hashes"] = args.long or args.very_long
|
||||||
|
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||||
|
return
|
||||||
else:
|
else:
|
||||||
tty.die("spack spec requires at least one spec or an active environment")
|
tty.die("spack spec requires at least one spec or an active environment")
|
||||||
|
|
||||||
for input, output in specs:
|
for input, output in specs:
|
||||||
# With -y, just print YAML to output.
|
# With --yaml or --json, just print the raw specs to output
|
||||||
if args.format:
|
if args.format:
|
||||||
if args.format == "yaml":
|
if args.format == "yaml":
|
||||||
# use write because to_yaml already has a newline.
|
# use write because to_yaml already has a newline.
|
||||||
|
|||||||
@@ -151,7 +151,8 @@ def is_installed(spec):
         key=lambda s: s.dag_hash(),
     )

-    return [spec for spec in specs if is_installed(spec)]
+    with spack.store.STORE.db.read_transaction():
+        return [spec for spec in specs if is_installed(spec)]


 def dependent_environments(
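Wrapping the whole comprehension in one read transaction (the "+" side) means the store database is locked once for the batch instead of once per is_installed() call. A minimal sketch of the idea, using a plain threading lock as a stand-in for Spack's transaction object:

    import threading

    class ReadTransaction:
        """Stand-in for a database read transaction: one lock for a whole batch."""

        def __init__(self, lock: threading.Lock):
            self.lock = lock

        def __enter__(self):
            self.lock.acquire()
            return self

        def __exit__(self, *exc):
            self.lock.release()

    db_lock = threading.Lock()
    specs = ["zlib", "cmake", "openssl"]
    with ReadTransaction(db_lock):
        # every membership check below runs under the single held lock
        installed = [s for s in specs if s != "cmake"]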
@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
             print()
             tty.info("The following environments still reference these specs:")
             colify([e.name for e in other_dependent_envs.keys()], indent=4)
+        if env:
+            msgs.append("use `spack remove` to remove the spec from the current environment")
         msgs.append("use `spack env remove` to remove environments")
         msgs.append("use `spack uninstall --force` to override")
         print()
@@ -71,7 +71,7 @@ def unload(parser, args):
             "Cannot specify specs on command line when unloading all specs with '--all'"
         )

-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
     if args.specs:
         specs = [
             spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
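Note the two sides split the loaded-hashes variable differently: a literal ":" versus os.pathsep, which is ":" on POSIX but ";" on Windows. A small illustration:

    import os

    # os.pathsep matches how PATH-like variables are joined on the host OS
    joined = os.pathsep.join(["abc123", "def456"])
    assert joined.split(os.pathsep) == ["abc123", "def456"]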
@@ -290,7 +290,7 @@ def __init__(
         operating_system,
         target,
         paths,
-        modules: Optional[List[str]] = None,
+        modules=None,
         alias=None,
         environment=None,
         extra_rpaths=None,

@@ -695,10 +695,6 @@ def compiler_environment(self):
         try:
             # load modules and set env variables
             for module in self.modules:
-                # On cray, mic-knl module cannot be loaded without cce module
-                # See: https://github.com/spack/spack/issues/3153
-                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
-                    spack.util.module_cmd.load_module("cce")
                 spack.util.module_cmd.load_module(module)

             # apply other compiler environment changes

@@ -96,6 +96,8 @@ def verbose_flag(self):

     openmp_flag = "-fopenmp"

+    # C++ flags based on CMake Modules/Compiler/Clang.cmake
+
     @property
     def cxx11_flag(self):
         if self.real_version < Version("3.3"):

@@ -120,6 +122,24 @@ def cxx17_flag(self):

         return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("5.0"):
+            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("12.0"):
+            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
+        elif self.real_version < Version("17.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         return "-std=c99"
@@ -142,7 +162,10 @@ def c17_flag(self):
     def c23_flag(self):
         if self.real_version < Version("9.0"):
             raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
-        return "-std=c2x"
+        elif self.real_version < Version("18.0"):
+            return "-std=c2x"
+        else:
+            return "-std=c23"

     @property
     def cc_pic_flag(self):
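These compiler properties all follow the same version-gating pattern: raise for versions with no support, return the draft spelling for mid-range versions, and the final standard name for newer ones. A standalone sketch of the pattern (plain tuples stand in for Spack's Version type):

    def c23_flag(version: tuple) -> str:
        if version < (9, 0):
            raise RuntimeError("the C23 standard needs clang >= 9.0")
        elif version < (18, 0):
            return "-std=c2x"   # draft-era spelling
        else:
            return "-std=c23"   # final standard name

    assert c23_flag((17, 0)) == "-std=c2x"
    assert c23_flag((18, 1)) == "-std=c23"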
@@ -78,24 +78,17 @@
             "image": "quay.io/almalinuxorg/almalinux:8"
         }
     },
-    "centos:stream": {
+    "centos:stream9": {
         "bootstrap": {
-            "template": "container/centos_stream.dockerfile",
-            "image": "quay.io/centos/centos:stream"
+            "template": "container/centos_stream9.dockerfile",
+            "image": "quay.io/centos/centos:stream9"
         },
         "os_package_manager": "dnf_epel",
-        "build": "spack/centos-stream",
+        "build": "spack/centos-stream9",
         "final": {
-            "image": "quay.io/centos/centos:stream"
+            "image": "quay.io/centos/centos:stream9"
         }
     },
-    "centos:7": {
-        "bootstrap": {
-            "template": "container/centos_7.dockerfile"
-        },
-        "os_package_manager": "yum",
-        "build": "spack/centos7"
-    },
     "opensuse/leap:15": {
         "bootstrap": {
             "template": "container/leap-15.dockerfile"

@@ -283,9 +283,12 @@ def __reduce__(self):
             database. If it is a spec, we'll evaluate
             ``spec.satisfies(query_spec)``

-        predicate_fn: optional predicate taking an InstallRecord as argument, and returning
-            whether that record is selected for the query. It can be used to craft criteria
-            that need some data for selection not provided by the Database itself.
+        known (bool or None): Specs that are "known" are those
+            for which Spack can locate a ``package.py`` file -- i.e.,
+            Spack "knows" how to install them. Specs that are unknown may
+            represent packages that existed in a previous version of
+            Spack, but have since either changed their name or
+            been removed

         installed (bool or InstallStatus or typing.Iterable or None):
             if ``True``, includes only installed

@@ -585,9 +588,6 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
         return self.dir / f"{spec.name}-{spec.dag_hash()}"


-SelectType = Callable[[InstallRecord], bool]
-
-
 class Database:
     #: Fields written for each install record
     record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -1367,7 +1367,7 @@ def _deprecate(self, spec, deprecator):
         self._data[spec_key] = spec_rec

     @_autospec
-    def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
+    def mark(self, spec, key, value):
         """Mark an arbitrary record on a spec."""
         with self.write_transaction():
             return self._mark(spec, key, value)

@@ -1516,7 +1516,7 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
     def _query(
         self,
         query_spec=any,
-        predicate_fn: Optional[SelectType] = None,
+        known=any,
         installed=True,
         explicit=any,
         start_date=None,

@@ -1524,7 +1524,7 @@ def _query(
         hashes=None,
         in_buildcache=any,
         origin=None,
-    ) -> List["spack.spec.Spec"]:
+    ):
         """Run a query on the database."""

         # TODO: Specs are a lot like queries. Should there be a
@@ -1570,7 +1570,7 @@ def _query(
             if explicit is not any and rec.explicit != explicit:
                 continue

-            if predicate_fn is not None and not predicate_fn(rec):
+            if known is not any and known(rec.spec.name):
                 continue

             if start_date or end_date:
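The two filters differ in what they see: "known" receives only the spec name and excludes matching records, while "predicate_fn" receives the whole install record and selects. A toy comparison with a stand-in record type:

    from typing import NamedTuple

    class InstallRecord(NamedTuple):  # stand-in for Spack's InstallRecord
        name: str
        installed: bool

    records = [InstallRecord("zlib", True), InstallRecord("oldpkg", False)]

    # "known" style: skip records whose *name* matches the callable
    known = lambda name: name == "oldpkg"
    kept_known = [r for r in records if not known(r.name)]

    # "predicate_fn" style: keep records the callable selects
    predicate_fn = lambda rec: rec.installed
    kept_predicate = [r for r in records if predicate_fn(r)]

    assert kept_known == kept_predicate == [records[0]]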
@@ -1655,14 +1655,14 @@ def query(self, *args, **kwargs):
     query.__doc__ = ""
     query.__doc__ += _QUERY_DOCSTRING

-    def query_one(self, query_spec, predicate_fn=None, installed=True):
+    def query_one(self, query_spec, known=any, installed=True):
         """Query for exactly one spec that matches the query spec.

         Raises an assertion error if more than one spec matches the
         query. Returns None if no installed package matches.

         """
-        concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
+        concrete_specs = self.query(query_spec, known=known, installed=installed)
         assert len(concrete_specs) <= 1
         return concrete_specs[0] if concrete_specs else None

@@ -1709,6 +1709,24 @@ def root(key, record):
             if id(rec.spec) not in needed and rec.installed
         ]

+    def update_explicit(self, spec, explicit):
+        """
+        Update the spec's explicit state in the database.
+
+        Args:
+            spec (spack.spec.Spec): the spec whose install record is being updated
+            explicit (bool): ``True`` if the package was requested explicitly
+                by the user, ``False`` if it was pulled in as a dependency of
+                an explicit package.
+        """
+        rec = self.get_record(spec)
+        if explicit != rec.explicit:
+            with self.write_transaction():
+                message = "{s.name}@{s.version} : marking the package {0}"
+                status = "explicit" if explicit else "implicit"
+                tty.debug(message.format(status, s=spec))
+                rec.explicit = explicit
+

 class UpstreamDatabaseLockingError(SpackError):
     """Raised when an operation would need to lock an upstream database"""
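update_explicit is a convenience wrapper around the record's "explicit" field, where mark (used on the other side of this diff) is the generic field setter. A toy model of the relationship (this Database is illustrative, not Spack's class):

    class Database:
        def __init__(self):
            self._data = {}

        def mark(self, key: str, field: str, value) -> None:
            # generic: set any field on an install record
            self._data.setdefault(key, {})[field] = value

        def update_explicit(self, key: str, explicit: bool) -> None:
            # specialized: touch only "explicit", and only when it changes
            rec = self._data.setdefault(key, {})
            if rec.get("explicit") != explicit:
                rec["explicit"] = explicit

    db = Database()
    db.update_explicit("zlib-abcdef", True)
    db.mark("zlib-abcdef", "explicit", True)  # same end state via the generic API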
@@ -97,7 +97,7 @@ class OpenMpi(Package):
 PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


-SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
+SUPPORTED_LANGUAGES = ("fortran", "cxx")


 def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:

@@ -9,13 +9,11 @@

 import os
 import re
-import shlex
 from enum import Enum
 from typing import List, Optional

 import spack.deptypes as dt
 import spack.environment.environment as ev
-import spack.paths
 import spack.spec
 import spack.traverse as traverse


@@ -228,7 +226,6 @@ def to_dict(self):
             "install_deps_target": self._target("install-deps"),
             "any_hash_target": self._target("%"),
             "jobserver_support": self.jobserver_support,
-            "spack_script": shlex.quote(spack.paths.spack_script),
             "adjacency_list": self.make_adjacency_list,
             "phony_convenience_targets": " ".join(self.phony_convenience_targets),
             "pkg_ids_variable": self.pkg_identifier_variable,

@@ -24,6 +24,7 @@
 from llnl.util.link_tree import ConflictingSpecsError
 from llnl.util.symlink import readlink, symlink

+import spack.cmd
 import spack.compilers
 import spack.concretize
 import spack.config

@@ -1190,6 +1191,7 @@ def scope_name(self):
     def include_concrete_envs(self):
         """Copy and save the included envs' specs internally"""

+        lockfile_meta = None
         root_hash_seen = set()
         concrete_hash_seen = set()
         self.included_concrete_spec_data = {}
@@ -1200,26 +1202,37 @@ def include_concrete_envs(self):
                 raise SpackEnvironmentError(f"Unable to find env at {env_path}")

             env = Environment(env_path)
-            self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
+
+            with open(env.lock_path) as f:
+                lockfile_as_dict = env._read_lockfile(f)
+
+            # Lockfile_meta must match each env and use at least format version 5
+            if lockfile_meta is None:
+                lockfile_meta = lockfile_as_dict["_meta"]
+            elif lockfile_meta != lockfile_as_dict["_meta"]:
+                raise SpackEnvironmentError("All lockfile _meta values must match")
+            elif lockfile_meta["lockfile-version"] < 5:
+                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")

             # Copy unique root specs from env
-            for root_dict in env._concrete_roots_dict():
+            self.included_concrete_spec_data[env_path] = {"roots": []}
+            for root_dict in lockfile_as_dict["roots"]:
                 if root_dict["hash"] not in root_hash_seen:
                     self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
                     root_hash_seen.add(root_dict["hash"])

             # Copy unique concrete specs from env
-            for dag_hash, spec_details in env._concrete_specs_dict().items():
-                if dag_hash not in concrete_hash_seen:
-                    self.included_concrete_spec_data[env_path]["concrete_specs"].update(
-                        {dag_hash: spec_details}
-                    )
-                    concrete_hash_seen.add(dag_hash)
+            for concrete_spec in lockfile_as_dict["concrete_specs"]:
+                if concrete_spec not in concrete_hash_seen:
+                    self.included_concrete_spec_data[env_path].update(
+                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
+                    )
+                    concrete_hash_seen.add(concrete_spec)

-            # Copy transitive include data
-            transitive = env.included_concrete_spec_data
-            if transitive:
-                self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
+            if "include_concrete" in lockfile_as_dict.keys():
+                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
+                    "include_concrete"
+                ]

         self._read_lockfile_dict(self._to_lockfile_dict())
         self.write()
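The validation on the "+" side insists that every included environment carries an identical _meta block at lockfile format 5 or newer. Reproduced over plain dicts (note the elif chain mirrors the hunk exactly, so the version test only fires for environments after the first):

    lockfiles = [
        {"_meta": {"lockfile-version": 5}},
        {"_meta": {"lockfile-version": 5}},
    ]

    lockfile_meta = None
    for lockfile_as_dict in lockfiles:
        if lockfile_meta is None:
            lockfile_meta = lockfile_as_dict["_meta"]
        elif lockfile_meta != lockfile_as_dict["_meta"]:
            raise ValueError("All lockfile _meta values must match")
        elif lockfile_meta["lockfile-version"] < 5:
            raise ValueError("The lockfile format must be at version 5 or higher")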
@@ -1936,16 +1949,17 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
         specs = specs if specs is not None else roots

         # Extend the set of specs to overwrite with modified dev specs and their parents
-        install_args["overwrite"] = {
-            *install_args.get("overwrite", ()),
-            *self._dev_specs_that_need_overwrite(),
-        }
+        overwrite: Set[str] = set()
+        overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
+        install_args["overwrite"] = overwrite

-        # Only environment roots are marked explicit
-        install_args["explicit"] = {
-            *install_args.get("explicit", ()),
-            *(s.dag_hash() for s in roots),
-        }
+        explicit: Set[str] = set()
+        explicit.update(
+            install_args.get("explicit", []),
+            (s.dag_hash() for s in specs),
+            (s.dag_hash() for s in roots),
+        )
+        install_args["explicit"] = explicit

         PackageInstaller([spec.package for spec in specs], install_args).install()


@@ -2137,23 +2151,16 @@ def _get_environment_specs(self, recurse_dependencies=True):

         return specs

-    def _concrete_specs_dict(self):
+    def _to_lockfile_dict(self):
+        """Create a dictionary to store a lockfile for this environment."""
         concrete_specs = {}
         for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
             spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
             # Assumes no legacy formats, since this was just created.
             spec_dict[ht.dag_hash.name] = s.dag_hash()
             concrete_specs[s.dag_hash()] = spec_dict
-        return concrete_specs

-    def _concrete_roots_dict(self):
         hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
-        return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
-
-    def _to_lockfile_dict(self):
-        """Create a dictionary to store a lockfile for this environment."""
-        concrete_specs = self._concrete_specs_dict()
-        root_specs = self._concrete_roots_dict()

         spack_dict = {"version": spack.spack_version}
         spack_commit = spack.main.get_spack_commit()

@@ -2174,7 +2181,7 @@ def _to_lockfile_dict(self):
             # spack version information
             "spack": spack_dict,
             # users specs + hashes are the 'roots' of the environment
-            "roots": root_specs,
+            "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
             # Concrete specs by hash, including dependencies
             "concrete_specs": concrete_specs,
         }
@@ -2467,27 +2474,21 @@ def _equiv_dict(first, second):
     return same_values and same_keys_with_same_overrides


-def display_specs(concretized_specs):
-    """Displays the list of specs returned by `Environment.concretize()`.
+def display_specs(specs):
+    """Displays a list of specs traversed breadth-first, covering nodes, with install status.

     Args:
-        concretized_specs (list): list of specs returned by
-            `Environment.concretize()`
+        specs (list): list of specs
     """
-
-    def _tree_to_display(spec):
-        return spec.tree(
-            recurse_dependencies=True,
-            format=spack.spec.DISPLAY_FORMAT,
-            status_fn=spack.spec.Spec.install_status,
-            hashlen=7,
-            hashes=True,
-        )
-
-    for user_spec, concrete_spec in concretized_specs:
-        tty.msg("Concretized {0}".format(user_spec))
-        sys.stdout.write(_tree_to_display(concrete_spec))
-        print("")
+    tree_string = spack.spec.tree(
+        specs,
+        format=spack.spec.DISPLAY_FORMAT,
+        hashes=True,
+        hashlen=7,
+        status_fn=spack.spec.Spec.install_status,
+        key=traverse.by_dag_hash,
+    )
+    print(tree_string)


 def _concretize_from_constraints(spec_constraints, tests=False):
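The rewritten display_specs prints one combined tree keyed by DAG hash, so a dependency shared by several roots appears once. A self-contained sketch of that breadth-first, deduplicating traversal (dicts stand in for specs):

    from collections import deque

    def tree(roots, key=id):
        seen, lines, queue = set(), [], deque(roots)
        while queue:
            node = queue.popleft()
            if key(node) in seen:
                continue  # shared dependency: printed only once
            seen.add(key(node))
            lines.append(node["name"])
            queue.extend(node.get("deps", []))
        return "\n".join(lines)

    zlib = {"name": "zlib"}
    print(tree([{"name": "cmake", "deps": [zlib]}, {"name": "curl", "deps": [zlib]}],
               key=lambda n: n["name"]))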
@@ -30,7 +30,6 @@
 import shutil
 import urllib.error
 import urllib.parse
-import urllib.request
 from pathlib import PurePath
 from typing import List, Optional


@@ -274,7 +273,10 @@ def __init__(self, url=None, checksum=None, **kwargs):
     @property
     def curl(self):
         if not self._curl:
-            self._curl = web_util.require_curl()
+            try:
+                self._curl = which("curl", required=True)
+            except CommandNotFoundError as exc:
+                tty.error(str(exc))
         return self._curl

     def source_id(self):
@@ -295,23 +297,27 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.debug(f"Already downloaded {self.archive_file}")
+            tty.debug("Already downloaded {0}".format(self.archive_file))
             return

-        errors: List[Exception] = []
+        url = None
+        errors = []
         for url in self.candidate_urls:
+            if not web_util.url_exists(url):
+                tty.debug("URL does not exist: " + url)
+                continue
+
             try:
                 self._fetch_from_url(url)
                 break
             except FailedDownloadError as e:
-                errors.extend(e.exceptions)
-        else:
-            raise FailedDownloadError(*errors)
+                errors.append(str(e))
+
+        for msg in errors:
+            tty.debug(msg)

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(url)

     def _fetch_from_url(self, url):
         if spack.config.get("config:url_fetch_method") == "curl":
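The "-" side leans on Python's for/else: the else suite runs only when the loop finishes without break, i.e. when no candidate URL succeeded. Minimal demonstration:

    def fetch_first(urls):
        errors = []
        for url in urls:
            try:
                if not url.startswith("https://"):
                    raise IOError(f"refusing {url}")  # stand-in for a failed download
                fetched = url
                break
            except IOError as e:
                errors.append(e)
        else:
            # reached only when no iteration hit `break`
            raise RuntimeError(f"all {len(errors)} candidate URLs failed")
        return fetched

    assert fetch_first(["ftp://mirror", "https://mirror"]) == "https://mirror"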
@@ -330,20 +336,19 @@ def _check_headers(self, headers):
     @_needs_stage
     def _fetch_urllib(self, url):
         save_file = self.stage.save_filename
+        tty.msg("Fetching {0}".format(url))

-        request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
-
+        # Run urllib but grab the mime type from the http headers
         try:
-            response = web_util.urlopen(request)
-        except (TimeoutError, urllib.error.URLError) as e:
+            url, headers, response = web_util.read_from_url(url)
+        except web_util.SpackWebError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
             if os.path.lexists(save_file):
                 os.remove(save_file)
-            raise FailedDownloadError(e) from e
-
-        tty.msg(f"Fetching {url}")
+            msg = "urllib failed to fetch with error {0}".format(e)
+            raise FailedDownloadError(url, msg)

         if os.path.lexists(save_file):
             os.remove(save_file)

@@ -351,7 +356,7 @@ def _fetch_urllib(self, url):
         with open(save_file, "wb") as _open_file:
             shutil.copyfileobj(response, _open_file)

-        self._check_headers(str(response.headers))
+        self._check_headers(str(headers))

     @_needs_stage
     def _fetch_curl(self, url):

@@ -360,7 +365,7 @@ def _fetch_curl(self, url):
         if self.stage.save_filename:
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + ".part"
-        tty.msg(f"Fetching {url}")
+        tty.msg("Fetching {0}".format(url))
         if partial_file:
             save_args = [
                 "-C",

@@ -400,8 +405,8 @@ def _fetch_curl(self, url):

         try:
             web_util.check_curl_code(curl.returncode)
-        except spack.error.FetchError as e:
-            raise FailedDownloadError(e) from e
+        except spack.error.FetchError as err:
+            raise spack.fetch_strategy.FailedDownloadError(url, str(err))

         self._check_headers(headers)

@@ -549,13 +554,13 @@ def fetch(self):

         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
             if os.path.lexists(file):
                 os.remove(file)
-            raise FailedDownloadError(e) from e
+            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

         if os.path.lexists(file):
             os.remove(file)

@@ -1307,41 +1312,35 @@ def __init__(self, *args, **kwargs):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.debug(f"Already downloaded {self.archive_file}")
+            tty.debug("Already downloaded {0}".format(self.archive_file))
             return

         parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "s3":
             raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

+        tty.debug("Fetching {0}".format(self.url))
+
         basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )

         with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
+            _, headers, stream = web_util.read_from_url(self.url)

             with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
+                shutil.copyfileobj(stream, f)

-        content_type = web_util.get_header(response.headers, "Content-type")
+        content_type = web_util.get_header(headers, "Content-type")

         if content_type == "text/html":
             warn_content_type_mismatch(self.archive_file or "the archive")

         if self.stage.save_filename:
-            fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
+            llnl.util.filesystem.rename(
+                os.path.join(self.stage.path, basename), self.stage.save_filename
+            )

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(self.url)


 @fetcher

@@ -1367,23 +1366,17 @@ def fetch(self):
         if parsed_url.scheme != "gs":
             raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

+        tty.debug("Fetching {0}".format(self.url))
+
         basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )

         with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
+            _, headers, stream = web_util.read_from_url(self.url)

             with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
+                shutil.copyfileobj(stream, f)

-        content_type = web_util.get_header(response.headers, "Content-type")
+        content_type = web_util.get_header(headers, "Content-type")

         if content_type == "text/html":
             warn_content_type_mismatch(self.archive_file or "the archive")

@@ -1392,9 +1385,7 @@ def fetch(self):
             os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(self.url)


 @fetcher
@@ -1731,9 +1722,9 @@ class NoCacheError(spack.error.FetchError):
 class FailedDownloadError(spack.error.FetchError):
     """Raised when a download fails."""

-    def __init__(self, *exceptions: Exception):
-        super().__init__("Failed to download")
-        self.exceptions = exceptions
+    def __init__(self, url, msg=""):
+        super().__init__("Failed to fetch file from URL: %s" % url, msg)
+        self.url = url


 class NoArchiveFileError(spack.error.FetchError):
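The two FailedDownloadError shapes change what callers can inspect: one aggregates the underlying exceptions, the other carries the failing URL. Both spellings as plain classes, for comparison:

    class FailedDownloadByCause(Exception):
        """Wraps the underlying exceptions (the "-" side)."""

        def __init__(self, *exceptions: Exception):
            super().__init__("Failed to download")
            self.exceptions = exceptions

    class FailedDownloadByUrl(Exception):
        """Carries the offending URL (the "+" side)."""

        def __init__(self, url, msg=""):
            super().__init__(f"Failed to fetch file from URL: {url}", msg)
            self.url = url

    try:
        raise FailedDownloadByCause(TimeoutError("timed out"))
    except FailedDownloadByCause as e:
        assert isinstance(e.exceptions[0], TimeoutError)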
@@ -33,7 +33,6 @@
 from llnl.util.tty.color import colorize

 import spack.config
-import spack.directory_layout
 import spack.paths
 import spack.projections
 import spack.relocate

@@ -51,7 +50,7 @@
 _projections_path = ".spack/projections.yaml"


-LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]
+LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]


 def view_symlink(src: str, dst: str, *args, **kwargs) -> None:

@@ -63,7 +62,7 @@ def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:


 def view_copy(
-    src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
+    src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
 ) -> None:
     """
     Copy a file from src to dst.

@@ -159,7 +158,7 @@ class FilesystemView:
     def __init__(
         self,
         root: str,
-        layout: spack.directory_layout.DirectoryLayout,
+        layout: "spack.directory_layout.DirectoryLayout",
         *,
         projections: Optional[Dict] = None,
         ignore_conflicts: bool = False,

@@ -181,10 +180,7 @@ def __init__(

         # Setup link function to include view
         self.link_type = link_type
-        self._link = function_for_link_type(link_type)
-
-    def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
-        self._link(src, dst, self, spec)
+        self.link = ft.partial(function_for_link_type(link_type), view=self)

     def add_specs(self, *specs, **kwargs):
         """

@@ -285,7 +281,7 @@ class YamlFilesystemView(FilesystemView):
     def __init__(
         self,
         root: str,
-        layout: spack.directory_layout.DirectoryLayout,
+        layout: "spack.directory_layout.DirectoryLayout",
         *,
         projections: Optional[Dict] = None,
         ignore_conflicts: bool = False,

@@ -41,9 +41,8 @@ def _populate_hooks(cls):

     relative_names = list(list_modules(spack.paths.hooks_path))

-    # write_install_manifest should come after any mutation of the install prefix, and
-    # autopush should include the install manifest.
-    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest", "autopush")
+    # Ensure that write_install_manifest comes last
+    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")

     for name in relative_names:
         module_name = __name__ + "." + name

@@ -13,7 +13,6 @@
 import spack.config
 import spack.relocate
 from spack.util.elf import ElfParsingError, parse_elf
-from spack.util.executable import Executable


 def is_shared_library_elf(filepath):

@@ -141,7 +140,7 @@ def post_install(spec, explicit=None):
         return

     # Only enable on platforms using ELF.
-    if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
+    if not spec.satisfies("platform=linux"):
         return

     # Disable this hook when bootstrapping, to avoid recursion.

@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
         return

     # Should failing to locate patchelf be a hard error?
-    patchelf_path = spack.relocate._patchelf()
-    if not patchelf_path:
+    patchelf = spack.relocate._patchelf()
+    if not patchelf:
         return
-    patchelf = Executable(patchelf_path)

     fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)


@@ -117,7 +117,7 @@ def post_install(spec, explicit=None):
         return

     # Only enable on platforms using ELF.
-    if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
+    if not spec.satisfies("platform=linux"):
         return

     visit_directory_tree(spec.prefix, ElfFilesWithRPathVisitor())

@@ -440,7 +440,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
             tty.debug(f"{pre} already registered in DB")
             record = spack.store.STORE.db.get_record(spec)
             if explicit and not record.explicit:
-                spack.store.STORE.db.mark(spec, "explicit", True)
+                spack.store.STORE.db.update_explicit(spec, explicit)

         except KeyError:
             # If not, register it and generate the module file.

@@ -600,9 +600,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
         if node is spec:
             spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
         elif source_pkg_dir:
-            fs.install_tree(
-                source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
-            )
+            fs.install_tree(source_pkg_dir, dest_pkg_dir)


 def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:

@@ -1363,8 +1361,8 @@ def _prepare_for_install(self, task: BuildTask) -> None:
             self._update_installed(task)

             # Only update the explicit entry once for the explicit package
-            if task.explicit and not rec.explicit:
-                spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
+            if task.explicit:
+                spack.store.STORE.db.update_explicit(task.pkg.spec, True)

     def _cleanup_all_tasks(self) -> None:
         """Cleanup all build tasks to include releasing their locks."""

@@ -2348,9 +2346,7 @@ def _install_source(self) -> None:
         src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
         tty.debug(f"{self.pre} Copying source to {src_target}")

-        fs.install_tree(
-            pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
-        )
+        fs.install_tree(pkg.stage.source_path, src_target)

     def _real_install(self) -> None:
         import spack.builder

@@ -87,8 +87,9 @@ def from_url(url: str):
     """Create an anonymous mirror by URL. This method validates the URL."""
     if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
         raise ValueError(
-            f'"{url}" is not a valid mirror URL. '
-            f"Scheme must be one of {supported_url_schemes}."
+            '"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
+                url, ", ".join(supported_url_schemes)
+            )
         )
     return Mirror(url)


@@ -733,7 +734,7 @@ def require_mirror_name(mirror_name):
     """Find a mirror by name and raise if it does not exist"""
     mirror = spack.mirror.MirrorCollection().get(mirror_name)
     if not mirror:
-        raise ValueError(f'no mirror named "{mirror_name}"')
+        raise ValueError('no mirror named "{0}"'.format(mirror_name))
     return mirror


@@ -3,22 +3,12 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from ._operating_system import OperatingSystem
-from .cray_backend import CrayBackend
-from .cray_frontend import CrayFrontend
 from .freebsd import FreeBSDOs
 from .linux_distro import LinuxDistro
 from .mac_os import MacOs
 from .windows_os import WindowsOs

-__all__ = [
-    "OperatingSystem",
-    "LinuxDistro",
-    "MacOs",
-    "CrayFrontend",
-    "CrayBackend",
-    "WindowsOs",
-    "FreeBSDOs",
-]
+__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "WindowsOs", "FreeBSDOs"]

 #: List of all the Operating Systems known to Spack
-operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs, FreeBSDOs]
+operating_systems = [LinuxDistro, MacOs, WindowsOs, FreeBSDOs]
@@ -1,172 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import os
-import re
-
-import llnl.util.tty as tty
-
-import spack.error
-import spack.version
-from spack.util.module_cmd import module
-
-from .linux_distro import LinuxDistro
-
-#: Possible locations of the Cray CLE release file,
-#: which we look at to get the CNL OS version.
-_cle_release_file = "/etc/opt/cray/release/cle-release"
-_clerelease_file = "/etc/opt/cray/release/clerelease"
-
-
-def read_cle_release_file():
-    """Read the CLE release file and return a dict with its attributes.
-
-    This file is present on newer versions of Cray.
-
-    The release file looks something like this::
-
-        RELEASE=6.0.UP07
-        BUILD=6.0.7424
-        ...
-
-    The dictionary we produce looks like this::
-
-        {
-          "RELEASE": "6.0.UP07",
-          "BUILD": "6.0.7424",
-          ...
-        }
-
-    Returns:
-        dict: dictionary of release attributes
-    """
-    with open(_cle_release_file) as release_file:
-        result = {}
-        for line in release_file:
-            # use partition instead of split() to ensure we only split on
-            # the first '=' in the line.
-            key, _, value = line.partition("=")
-            result[key] = value.strip()
-        return result
-
-
-def read_clerelease_file():
-    """Read the CLE release file and return the Cray OS version.
-
-    This file is present on older versions of Cray.
-
-    The release file looks something like this::
-
-        5.2.UP04
-
-    Returns:
-        str: the Cray OS version
-    """
-    with open(_clerelease_file) as release_file:
-        for line in release_file:
-            return line.strip()
-
-
-class CrayBackend(LinuxDistro):
-    """Compute Node Linux (CNL) is the operating system used for the Cray XC
-    series super computers. It is a very stripped down version of GNU/Linux.
-    Any compilers found through this operating system will be used with
-    modules. If updated, user must make sure that version and name are
-    updated to indicate that OS has been upgraded (or downgraded)
-    """
-
-    def __init__(self):
-        name = "cnl"
-        version = self._detect_crayos_version()
-        if version:
-            # If we found a CrayOS version, we do not want the information
-            # from LinuxDistro. In order to skip the logic from
-            # distro.linux_distribution, while still calling __init__
-            # methods further up the MRO, we skip LinuxDistro in the MRO and
-            # call the OperatingSystem superclass __init__ method
-            super(LinuxDistro, self).__init__(name, version)
-        else:
-            super().__init__()
-        self.modulecmd = module
-
-    def __str__(self):
-        return self.name + str(self.version)
-
-    @classmethod
-    def _detect_crayos_version(cls):
-        if os.path.isfile(_cle_release_file):
-            release_attrs = read_cle_release_file()
-            if "RELEASE" not in release_attrs:
-                # This Cray system uses a base OS not CLE/CNL
-                return None
-            v = spack.version.Version(release_attrs["RELEASE"])
-            return v[0]
-        elif os.path.isfile(_clerelease_file):
-            v = read_clerelease_file()
-            return spack.version.Version(v)[0]
-        else:
-            # Not all Cray systems run CNL on the backend.
-            # Systems running in what Cray calls "cluster" mode run other
-            # linux OSs under the Cray PE.
-            # So if we don't detect any Cray OS version on the system,
-            # we return None. We can't ever be sure we will get a Cray OS
-            # version.
-            # Returning None allows the calling code to test for the value
-            # being "True-ish" rather than requiring a try/except block.
-            return None
-
-    def arguments_to_detect_version_fn(self, paths):
-        import spack.compilers
-
-        command_arguments = []
-        for compiler_name in spack.compilers.supported_compilers():
-            cmp_cls = spack.compilers.class_for_compiler_name(compiler_name)
-
-            # If the compiler doesn't have a corresponding
-            # Programming Environment, skip to the next
-            if cmp_cls.PrgEnv is None:
-                continue
-
-            if cmp_cls.PrgEnv_compiler is None:
-                tty.die("Must supply PrgEnv_compiler with PrgEnv")
-
-            compiler_id = spack.compilers.CompilerID(self, compiler_name, None)
-            detect_version_args = spack.compilers.DetectVersionArgs(
-                id=compiler_id, variation=(None, None), language="cc", path="cc"
-            )
-            command_arguments.append(detect_version_args)
-        return command_arguments
-
-    def detect_version(self, detect_version_args):
-        import spack.compilers
-
-        modulecmd = self.modulecmd
-        compiler_name = detect_version_args.id.compiler_name
-        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
-        output = modulecmd("avail", compiler_cls.PrgEnv_compiler)
-        version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
-        matches = re.findall(version_regex, output)
-        version = tuple(version for _, version in matches if "classic" not in version)
-        compiler_id = detect_version_args.id
-        value = detect_version_args._replace(id=compiler_id._replace(version=version))
-        return value, None
-
-    def make_compilers(self, compiler_id, paths):
-        import spack.spec
-
-        name = compiler_id.compiler_name
-        cmp_cls = spack.compilers.class_for_compiler_name(name)
-        compilers = []
-        for v in compiler_id.version:
-            comp = cmp_cls(
-                spack.spec.CompilerSpec(name + "@=" + v),
-                self,
-                "any",
-                ["cc", "CC", "ftn"],
-                [cmp_cls.PrgEnv, name + "/" + v],
-            )
-
-            compilers.append(comp)
-        return compilers
@@ -1,105 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import contextlib
-import os
-import re
-
-import llnl.util.filesystem as fs
-import llnl.util.lang
-import llnl.util.tty as tty
-
-from spack.util.environment import get_path
-from spack.util.module_cmd import module
-
-from .linux_distro import LinuxDistro
-
-
-@contextlib.contextmanager
-def unload_programming_environment():
-    """Context manager that unloads Cray Programming Environments."""
-    env_bu = None
-
-    # We rely on the fact that the PrgEnv-* modules set the PE_ENV
-    # environment variable.
-    if "PE_ENV" in os.environ:
-        # Copy environment variables to restore them after the compiler
-        # detection. We expect that the only thing PrgEnv-* modules do is
-        # the environment variables modifications.
-        env_bu = os.environ.copy()
-
-        # Get the name of the module from the environment variable.
-        prg_env = "PrgEnv-" + os.environ["PE_ENV"].lower()
-
-        # Unload the PrgEnv-* module. By doing this we intentionally
-        # provoke errors when the Cray's compiler wrappers are executed
-        # (Error: A PrgEnv-* modulefile must be loaded.) so they will not
-        # be detected as valid compilers by the overridden method. We also
-        # expect that the modules that add the actual compilers' binaries
-        # into the PATH environment variable (i.e. the following modules:
-        # 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
-        # specified as prerequisites in the PrgEnv-* modulefiles.
-        module("unload", prg_env)
-
-    yield
-
-    # Restore the environment.
-    if env_bu is not None:
-        os.environ.clear()
-        os.environ.update(env_bu)
-
-
-class CrayFrontend(LinuxDistro):
-    """Represents OS that runs on login and service nodes of the Cray platform.
-
-    It acts as a regular Linux without Cray-specific modules and compiler
-    wrappers."""
-
-    @property
-    def compiler_search_paths(self):
-        """Calls the default function but unloads Cray's programming
-        environments first.
-
-        This prevents from detecting Cray compiler wrappers and avoids
-        possible false detections.
-        """
-        import spack.compilers
-
-        with unload_programming_environment():
-            search_paths = get_path("PATH")
-
-        extract_path_re = re.compile(r"prepend-path[\s]*PATH[\s]*([/\w\.:-]*)")
-
-        for compiler_cls in spack.compilers.all_compiler_types():
-            # Check if the compiler class is supported on Cray
-            prg_env = getattr(compiler_cls, "PrgEnv", None)
-            compiler_module = getattr(compiler_cls, "PrgEnv_compiler", None)
-            if not (prg_env and compiler_module):
-                continue
-
-            # It is supported, check which versions are available
-            output = module("avail", compiler_cls.PrgEnv_compiler)
-            version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
-            matches = re.findall(version_regex, output)
-            versions = tuple(version for _, version in matches if "classic" not in version)
-
-            # Now inspect the modules and add to paths
-            msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
-            tty.debug(msg.format(compiler_module, versions))
-            for v in versions:
-                try:
-                    current_module = compiler_module + "/" + v
-                    out = module("show", current_module)
-                    match = extract_path_re.search(out)
-                    search_paths += match.group(1).split(":")
-                except Exception as e:
-                    msg = (
-                        "[CRAY FE] An unexpected error occurred while "
-                        "detecting FE compiler [compiler={0}, "
-                        " version={1}, error={2}]"
-                    )
-                    tty.debug(msg.format(compiler_cls.name, v, str(e)))
-
-        search_paths = list(llnl.util.lang.dedupe(search_paths))
-        return fs.search_paths_for_executables(*search_paths)
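The save/scrub/restore pattern in the deleted context manager is reusable on its own. A minimal sketch, with a try/finally added so the environment is restored even on error (the original did not do this):

import contextlib
import os

@contextlib.contextmanager
def scrubbed_env(*names):
    """Snapshot os.environ, drop selected variables, restore afterwards."""
    backup = os.environ.copy()
    for name in names:
        os.environ.pop(name, None)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(backup)

# Lookups inside the block do not see the scrubbed variable.
with scrubbed_env("PE_ENV"):
    assert "PE_ENV" not in os.environ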
@@ -199,10 +199,10 @@ def __init__(cls, name, bases, attr_dict):
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
             # Append a tag to each detectable package, so that finding them is faster
-            if not hasattr(cls, "tags"):
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
                 setattr(cls, "tags", [DetectablePackageMeta.TAG])
-            elif DetectablePackageMeta.TAG not in cls.tags:
-                cls.tags.append(DetectablePackageMeta.TAG)

             @classmethod
             def platform_executables(cls):
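A standalone sketch of the metaclass behavior on the new side of this hunk (class names and the TAG value are illustrative). Note the new branch appends whenever a `tags` attribute exists, even one inherited from a base class, whereas the old code guarded against duplicate tags:

# Tagging sketch; not Spack's actual DetectablePackageMeta.
class DetectableMeta(type):
    TAG = "detectable"

    def __init__(cls, name, bases, attr_dict):
        super().__init__(name, bases, attr_dict)
        if hasattr(cls, "executables") or hasattr(cls, "libraries"):
            if hasattr(cls, "tags"):
                getattr(cls, "tags").append(DetectableMeta.TAG)
            else:
                setattr(cls, "tags", [DetectableMeta.TAG])

class Cmake(metaclass=DetectableMeta):
    executables = ["cmake"]
    tags = ["build-tools"]

print(Cmake.tags)  # ['build-tools', 'detectable']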
@@ -621,10 +621,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
     #: By default do not run tests within package's install()
     run_tests = False

-    #: Keep -Werror flags, matches config:flags:keep_werror to override config
-    # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
-    keep_werror: Optional[str] = None
-
     #: Most packages are NOT extendable. Set to True if you want extensions.
     extendable = False
@@ -930,6 +926,32 @@ def global_license_file(self):
             self.global_license_dir, self.name, os.path.basename(self.license_files[0])
         )

+    # NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
+    # Python 3.8+, but we still support 3.6.
+    @property
+    def keep_werror(self) -> Optional[str]:
+        """Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.
+
+        Valid return values are:
+        * ``"all"``: keep all ``-Werror`` flags.
+        * ``"specific"``: keep only ``-Werror=specific-warning`` flags.
+        * ``"none"``: filter out all ``-Werror*`` flags.
+        * ``None``: respect the user's configuration (``"none"`` by default).
+        """
+        if self.spec.satisfies("%nvhpc@:23.3") or self.spec.satisfies("%pgi"):
+            # Filtering works by replacing -Werror with -Wno-error, but older nvhpc and
+            # PGI do not understand -Wno-error, so we disable filtering.
+            return "all"
+
+        elif self.spec.satisfies("%nvhpc@23.4:"):
+            # newer nvhpc supports -Wno-error but can't disable specific warnings with
+            # -Wno-error=warning. Skip -Werror=warning, but still filter -Werror.
+            return "specific"
+
+        else:
+            # use -Werror disablement by default for other compilers
+            return None
+
     @property
     def version(self):
         if not self.spec.versions.concrete:
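The three return values map naturally onto a flag-rewriting pass. A minimal sketch, not Spack's actual compiler-wrapper code; the `filter_werror` helper is hypothetical:

def filter_werror(flags, keep_werror):
    if keep_werror == "all":
        return list(flags)
    rewritten = []
    for flag in flags:
        if keep_werror == "specific" and flag.startswith("-Werror="):
            rewritten.append(flag)  # keep -Werror=<warning> flags as-is
        elif flag.startswith("-Werror"):
            # "none" (or None) rewrites every -Werror* to its -Wno-error form
            rewritten.append(flag.replace("-Werror", "-Wno-error", 1))
        else:
            rewritten.append(flag)
    return rewritten

print(filter_werror(["-O2", "-Werror", "-Werror=vla"], "specific"))
# ['-O2', '-Wno-error', '-Werror=vla']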
@@ -6,7 +6,6 @@

 from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
 from ._platform import Platform
-from .cray import Cray
 from .darwin import Darwin
 from .freebsd import FreeBSD
 from .linux import Linux
@@ -15,7 +14,6 @@

 __all__ = [
     "Platform",
-    "Cray",
     "Darwin",
     "Linux",
     "FreeBSD",
@@ -8,7 +8,6 @@

 import spack.util.environment

-from .cray import Cray
 from .darwin import Darwin
 from .freebsd import FreeBSD
 from .linux import Linux
@@ -16,7 +15,7 @@
 from .windows import Windows

 #: List of all the platform classes known to Spack
-platforms = [Cray, Darwin, Linux, Windows, FreeBSD, Test]
+platforms = [Darwin, Linux, Windows, FreeBSD, Test]


 @llnl.util.lang.memoized
@@ -2,254 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
 import os.path
-import platform
-import re
-
-import archspec.cpu
-
-import llnl.util.tty as tty
-from llnl.util.symlink import readlink
-
-import spack.target
-import spack.version
-from spack.operating_systems.cray_backend import CrayBackend
-from spack.operating_systems.cray_frontend import CrayFrontend
-from spack.paths import build_env_path
-from spack.util.executable import Executable
-from spack.util.module_cmd import module
-
-from ._platform import NoPlatformError, Platform
-
-_craype_name_to_target_name = {
-    "x86-cascadelake": "cascadelake",
-    "x86-naples": "zen",
-    "x86-rome": "zen2",
-    "x86-milan": "zen3",
-    "x86-skylake": "skylake_avx512",
-    "mic-knl": "mic_knl",
-    "interlagos": "bulldozer",
-    "abudhabi": "piledriver",
-}
-
-_ex_craype_dir = "/opt/cray/pe/cpe"
-_xc_craype_dir = "/opt/cray/pe/cdt"


 def slingshot_network():
     return os.path.exists("/opt/cray/pe") and (
         os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
     )
-
-
-def _target_name_from_craype_target_name(name):
-    return _craype_name_to_target_name.get(name, name)
-
-
-class Cray(Platform):
-    priority = 10
-
-    def __init__(self):
-        """Create a Cray system platform.
-
-        Target names should use craype target names but not include the
-        'craype-' prefix. Uses first viable target from:
-          self
-          envars [SPACK_FRONT_END, SPACK_BACK_END]
-          configuration file "targets.yaml" with keys 'front_end', 'back_end'
-          scanning /etc/bash/bashrc.local for back_end only
-        """
-        super().__init__("cray")
-
-        # Make all craype targets available.
-        for target in self._avail_targets():
-            name = _target_name_from_craype_target_name(target)
-            self.add_target(name, spack.target.Target(name, "craype-%s" % target))
-
-        self.back_end = os.environ.get("SPACK_BACK_END", self._default_target_from_env())
-        self.default = self.back_end
-        if self.back_end not in self.targets:
-            # We didn't find a target module for the backend
-            raise NoPlatformError()
-
-        # Setup frontend targets
-        for name in archspec.cpu.TARGETS:
-            if name not in self.targets:
-                self.add_target(name, spack.target.Target(name))
-        self.front_end = os.environ.get("SPACK_FRONT_END", archspec.cpu.host().name)
-        if self.front_end not in self.targets:
-            self.add_target(self.front_end, spack.target.Target(self.front_end))
-
-        front_distro = CrayFrontend()
-        back_distro = CrayBackend()
-
-        self.default_os = str(back_distro)
-        self.back_os = self.default_os
-        self.front_os = str(front_distro)
-
-        self.add_operating_system(self.back_os, back_distro)
-        if self.front_os != self.back_os:
-            self.add_operating_system(self.front_os, front_distro)
-
-    def setup_platform_environment(self, pkg, env):
-        """Change the linker to default dynamic to be more
-        similar to linux/standard linker behavior
-        """
-        # Unload these modules to prevent any silent linking or unnecessary
-        # I/O profiling in the case of darshan.
-        modules_to_unload = ["cray-mpich", "darshan", "cray-libsci", "altd"]
-        for mod in modules_to_unload:
-            module("unload", mod)
-
-        env.set("CRAYPE_LINK_TYPE", "dynamic")
-        cray_wrapper_names = os.path.join(build_env_path, "cray")
-
-        if os.path.isdir(cray_wrapper_names):
-            env.prepend_path("PATH", cray_wrapper_names)
-            env.prepend_path("SPACK_ENV_PATH", cray_wrapper_names)
-
-        # Makes spack installed pkg-config work on Crays
-        env.append_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig")
-        env.append_path("PKG_CONFIG_PATH", "/usr/local/lib64/pkgconfig")
-
-        # CRAY_LD_LIBRARY_PATH is used at build time by the cray compiler
-        # wrappers to augment LD_LIBRARY_PATH. This is to avoid long load
-        # times at runtime. This behavior is not always respected on cray
-        # "cluster" systems, so we reproduce it here.
-        if os.environ.get("CRAY_LD_LIBRARY_PATH"):
-            env.prepend_path("LD_LIBRARY_PATH", os.environ["CRAY_LD_LIBRARY_PATH"])
-
-    @classmethod
-    def craype_type_and_version(cls):
-        if os.path.isdir(_ex_craype_dir):
-            craype_dir = _ex_craype_dir
-            craype_type = "EX"
-        elif os.path.isdir(_xc_craype_dir):
-            craype_dir = _xc_craype_dir
-            craype_type = "XC"
-        else:
-            return (None, None)
-
-        # Take the default version from known symlink path
-        default_path = os.path.join(craype_dir, "default")
-        if os.path.islink(default_path):
-            version = spack.version.Version(readlink(default_path))
-            return (craype_type, version)
-
-        # If no default version, sort available versions and return latest
-        versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
-        versions_available.sort(reverse=True)
-        if not versions_available:
-            return (craype_type, None)
-        return (craype_type, versions_available[0])
-
-    @classmethod
-    def detect(cls):
-        """
-        Detect whether this system requires CrayPE module support.
-
-        Systems with newer CrayPE (21.10 for EX systems, future work for CS and
-        XC systems) have compilers and MPI wrappers that can be used directly
-        by path. These systems are considered ``linux`` platforms.
-
-        For systems running an older CrayPE, we detect the Cray platform based
-        on the availability through `module` of the Cray programming
-        environment. If this environment is available, we can use it to find
-        compilers, target modules, etc. If the Cray programming environment is
-        not available via modules, then we will treat it as a standard linux
-        system, as the Cray compiler wrappers and other components of the Cray
-        programming environment are irrelevant without module support.
-        """
-        if "opt/cray" not in os.environ.get("MODULEPATH", ""):
-            return False
-
-        craype_type, craype_version = cls.craype_type_and_version()
-        if craype_type == "XC":
-            return True
-        if craype_type == "EX" and craype_version < spack.version.Version("21.10"):
-            return True
-        return False
-
-    def _default_target_from_env(self):
-        """Set and return the default CrayPE target loaded in a clean login
-        session.
-
-        A bash subshell is launched with a wiped environment and the list of
-        loaded modules is parsed for the first acceptable CrayPE target.
-        """
-        # env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
-        if getattr(self, "default", None) is None:
-            bash = Executable("/bin/bash")
-            output = bash(
-                "--norc",
-                "--noprofile",
-                "-lc",
-                "echo $CRAY_CPU_TARGET",
-                env={"TERM": os.environ.get("TERM", "")},
-                output=str,
-                error=os.devnull,
-            )
-
-            default_from_module = "".join(output.split())  # rm all whitespace
-            if default_from_module:
-                tty.debug("Found default module:%s" % default_from_module)
-                return default_from_module
-            else:
-                front_end = archspec.cpu.host()
-                # Look for the frontend architecture or closest ancestor
-                # available in cray target modules
-                avail = [_target_name_from_craype_target_name(x) for x in self._avail_targets()]
-                for front_end_possibility in [front_end] + front_end.ancestors:
-                    if front_end_possibility.name in avail:
-                        tty.debug("using front-end architecture or available ancestor")
-                        return front_end_possibility.name
-                else:
-                    tty.debug("using platform.machine as default")
-                    return platform.machine()
-
-    def _avail_targets(self):
-        """Return a list of available CrayPE CPU targets."""
-
-        def modules_in_output(output):
-            """Returns a list of valid modules parsed from modulecmd output"""
-            return [i for i in re.split(r"\s\s+|\n", output)]
-
-        def target_names_from_modules(modules):
-            # Craype- module prefixes that are not valid CPU targets.
-            targets = []
-            for mod in modules:
-                if "craype-" in mod:
-                    name = mod[7:]
-                    name = name.split()[0]
-                    _n = name.replace("-", "_")  # test for mic-knl/mic_knl
-                    is_target_name = name in archspec.cpu.TARGETS or _n in archspec.cpu.TARGETS
-                    is_cray_target_name = name in _craype_name_to_target_name
-                    if is_target_name or is_cray_target_name:
-                        targets.append(name)
-
-            return targets
-
-        def modules_from_listdir():
-            craype_default_path = "/opt/cray/pe/craype/default/modulefiles"
-            if os.path.isdir(craype_default_path):
-                return os.listdir(craype_default_path)
-            return []
-
-        if getattr(self, "_craype_targets", None) is None:
-            strategies = [
-                lambda: modules_in_output(module("avail", "-t", "craype-")),
-                modules_from_listdir,
-            ]
-            for available_craype_modules in strategies:
-                craype_modules = available_craype_modules()
-                craype_targets = target_names_from_modules(craype_modules)
-                if craype_targets:
-                    self._craype_targets = craype_targets
-                    break
-            else:
-                # If nothing is found add platform.machine()
-                # to avoid Spack erroring out
-                self._craype_targets = [platform.machine()]
-
-        return self._craype_targets
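The deleted craype_type_and_version() resolved the CrayPE version by preferring the `default` symlink and falling back to the newest directory entry. A self-contained sketch of that lookup, with a naive numeric sort standing in for spack.version.Version:

import os

def craype_version(craype_dir):
    """Prefer the `default` symlink, else pick the highest-sorting entry."""
    default_path = os.path.join(craype_dir, "default")
    if os.path.islink(default_path):
        return os.readlink(default_path)  # e.g. "23.12"
    versions = sorted(
        os.listdir(craype_dir),
        key=lambda v: tuple(int(p) for p in v.split(".") if p.isdigit()),
        reverse=True,
    )
    return versions[0] if versions else None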
@@ -33,7 +33,6 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

-import spack
 import spack.caches
 import spack.config
 import spack.error
@@ -50,8 +49,6 @@
 #: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
 ROOT_PYTHON_NAMESPACE = "spack.pkg"

-_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
-

 def python_package_for_repo(namespace):
     """Returns the full namespace of a repository, given its relative one
@@ -912,49 +909,17 @@ def __contains__(self, pkg_name):
         return self.exists(pkg_name)


-def _parse_package_api_version(
-    config: Dict[str, Any],
-    min_api: Tuple[int, int] = spack.min_package_api_version,
-    max_api: Tuple[int, int] = spack.package_api_version,
-) -> Tuple[int, int]:
-    api = config.get("api")
-    if api is None:
-        package_api = (1, 0)
-    else:
-        if not isinstance(api, str):
-            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
-        api_match = _API_REGEX.match(api)
-        if api_match is None:
-            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
-        package_api = (int(api_match.group(1)), int(api_match.group(2)))
-
-    if min_api <= package_api <= max_api:
-        return package_api
-
-    min_str = ".".join(str(i) for i in min_api)
-    max_str = ".".join(str(i) for i in max_api)
-    curr_str = ".".join(str(i) for i in package_api)
-    raise BadRepoError(
-        f"Package API v{curr_str} is not supported by this version of Spack ("
-        f"must be between v{min_str} and v{max_str})"
-    )
-
-
 class Repo:
     """Class representing a package repository in the filesystem.

-    Each package repository must have a top-level configuration file called `repo.yaml`.
+    Each package repository must have a top-level configuration file
+    called `repo.yaml`.

-    It contains the following keys:
+    Currently, `repo.yaml` must define:

     `namespace`:
         A Python namespace where the repository's packages should live.
-
-    `api`:
-        A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
-        For the repo to be compatible with the current version of Spack, the version must be
-        greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
-        :py:data:`spack.package_api_version`.
     """

     def __init__(self, root, cache=None):
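The removed parser is easy to exercise on its own. A trimmed sketch of its vX.Y handling, with BadRepoError replaced by ValueError for self-containment:

import re

_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")

def parse_api(api: str):
    match = _API_REGEX.match(api)
    if match is None:
        raise ValueError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
    return (int(match.group(1)), int(match.group(2)))

print(parse_api("v1.0"))                      # (1, 0)
print((1, 0) <= parse_api("v1.5") <= (2, 0))  # tuple comparison, as in the range check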
@@ -983,7 +948,7 @@ def check(condition, msg):
             "%s must define a namespace." % os.path.join(root, repo_config_name),
         )

-        self.namespace: str = config["namespace"]
+        self.namespace = config["namespace"]
         check(
             re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
             ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root))
@@ -996,15 +961,13 @@ def check(condition, msg):
         # Keep name components around for checking prefixes.
         self._names = self.full_namespace.split(".")

-        packages_dir: str = config.get("subdirectory", packages_dir_name)
+        packages_dir = config.get("subdirectory", packages_dir_name)
         self.packages_path = os.path.join(self.root, packages_dir)
         check(
             os.path.isdir(self.packages_path),
             "No directory '%s' found in '%s'" % (packages_dir, root),
         )

-        self.package_api = _parse_package_api_version(config)
-
         # These are internal cache variables.
         self._modules = {}
         self._classes = {}
@@ -1047,7 +1010,7 @@ def is_prefix(self, fullname):
         parts = fullname.split(".")
         return self._names[: len(parts)] == parts

-    def _read_config(self) -> Dict[str, Any]:
+    def _read_config(self):
         """Check for a YAML config file in this db's root directory."""
         try:
             with open(self.config_file) as reponame_file:
@@ -52,10 +52,7 @@
         "target": {"type": "string"},
         "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
         "modules": {
-            "anyOf": [
-                {"type": "string"},
-                {"type": "null"},
-                {"type": "array", "items": {"type": "string"}},
-            ]
+            "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
         },
         "implicit_rpaths": {
             "anyOf": [
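The difference between the two sides is visible with the jsonschema package (assumed available; Spack vendors its own copy). The stricter, itemized form rejects arrays of non-strings, which the loosened {"type": "array"} would accept:

import jsonschema

strict = {
    "anyOf": [
        {"type": "string"},
        {"type": "null"},
        {"type": "array", "items": {"type": "string"}},
    ]
}

jsonschema.validate(None, strict)            # ok
jsonschema.validate(["PrgEnv-gnu"], strict)  # ok
try:
    jsonschema.validate([1, 2], strict)      # array items must be strings
except jsonschema.ValidationError as err:
    print(err.message)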
@@ -13,7 +13,6 @@
     r"\w[\w-]*": {
         "type": "object",
         "additionalProperties": False,
-        "required": ["spec"],
         "properties": {"spec": {"type": "string"}, "path": {"type": "string"}},
     }
 },
@@ -116,6 +116,8 @@ class Provenance(enum.IntEnum):
     PACKAGE_PY = enum.auto()
     # An installed spec
     INSTALLED = enum.auto()
+    # lower provenance for installed git refs so concretizer prefers StandardVersion installs
+    INSTALLED_GIT_VERSION = enum.auto()
     # A runtime injected from another package (e.g. a compiler)
     RUNTIME = enum.auto()
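Because Provenance is an IntEnum, enum.auto() assigns increasing integers in declaration order, so inserting INSTALLED_GIT_VERSION right after INSTALLED slots it between INSTALLED and RUNTIME when origins are compared numerically. A minimal sketch (member names mirror the hunk; the other members are elided):

import enum

class Provenance(enum.IntEnum):
    INSTALLED = enum.auto()
    INSTALLED_GIT_VERSION = enum.auto()
    RUNTIME = enum.auto()

assert Provenance.INSTALLED < Provenance.INSTALLED_GIT_VERSION < Provenance.RUNTIME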
@@ -1433,14 +1435,16 @@ def condition(
         # caller, we won't emit partial facts.

         condition_id = next(self._id_counter)
+        self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
+        self.gen.fact(fn.condition_reason(condition_id, msg))

         trigger_id = self._get_condition_id(
             required_spec, cache=self._trigger_cache, body=True, transform=transform_required
         )
-        self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
-        self.gen.fact(fn.condition_reason(condition_id, msg))
         self.gen.fact(
             fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
         )

         if not imposed_spec:
             return condition_id
@@ -1689,43 +1693,19 @@ def external_packages(self):
                 spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
             ]

-            selected_externals = set()
+            external_specs = []
             if spec_filters:
                 for current_filter in spec_filters:
                     current_filter.factory = lambda: candidate_specs
-                    selected_externals.update(current_filter.selected_specs())
-
-            # Emit facts for externals specs. Note that "local_idx" is the index of the spec
-            # in packages:<pkg_name>:externals. This means:
-            #
-            # packages:<pkg_name>:externals[local_idx].spec == spec
-            external_versions = []
-            for local_idx, spec in enumerate(candidate_specs):
-                msg = f"{spec.name} available as external when satisfying {spec}"
-
-                if spec_filters and spec not in selected_externals:
-                    continue
-
-                if not spec.versions.concrete:
-                    warnings.warn(f"cannot use the external spec {spec}: needs a concrete version")
-                    continue
-
-                def external_imposition(input_spec, requirements):
-                    return requirements + [
-                        fn.attr("external_conditions_hold", input_spec.name, local_idx)
-                    ]
-
-                try:
-                    self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
-                except (spack.error.SpecError, RuntimeError) as e:
-                    warnings.warn(f"while setting up external spec {spec}: {e}")
-                    continue
-                external_versions.append((spec.version, local_idx))
-                self.possible_versions[spec.name].add(spec.version)
-                self.gen.newline()
+                    external_specs.extend(current_filter.selected_specs())
+            else:
+                external_specs.extend(candidate_specs)

             # Order the external versions to prefer more recent versions
             # even if specs in packages.yaml are not ordered that way
+            external_versions = [
+                (x.version, external_id) for external_id, x in enumerate(external_specs)
+            ]
             external_versions = [
                 (v, idx, external_id)
                 for idx, (v, external_id) in enumerate(sorted(external_versions, reverse=True))
@@ -1735,6 +1715,19 @@ def external_imposition(input_spec, requirements):
                 DeclaredVersion(version=version, idx=idx, origin=Provenance.EXTERNAL)
             )

+            # Declare external conditions with a local index into packages.yaml
+            for local_idx, spec in enumerate(external_specs):
+                msg = "%s available as external when satisfying %s" % (spec.name, spec)
+
+                def external_imposition(input_spec, requirements):
+                    return requirements + [
+                        fn.attr("external_conditions_hold", input_spec.name, local_idx)
+                    ]
+
+                self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
+                self.possible_versions[spec.name].add(spec.version)
+                self.gen.newline()
+
             self.trigger_rules()
             self.effect_rules()
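On toy data the two comprehensions above behave like this: each external keeps its packages.yaml position (external_id), then gets a preference index (idx) in descending version order, so newer versions sort first regardless of file order. The version strings are stand-ins for spec.version objects:

external_specs = ["1.2", "2.0", "1.8"]  # stand-ins for spec.version

external_versions = [(v, ext_id) for ext_id, v in enumerate(external_specs)]
external_versions = [
    (v, idx, ext_id)
    for idx, (v, ext_id) in enumerate(sorted(external_versions, reverse=True))
]
print(external_versions)  # [('2.0', 0, 1), ('1.8', 1, 2), ('1.2', 2, 0)]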
@@ -1921,9 +1914,12 @@ def _spec_clauses(
         for flag_type, flags in spec.compiler_flags.items():
             for flag in flags:
                 clauses.append(f.node_flag(spec.name, flag_type, flag))
-                clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
                 if not spec.concrete and flag.propagate is True:
-                    clauses.append(f.node_flag_propagate(spec.name, flag_type))
+                    clauses.append(
+                        f.propagate(
+                            spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
+                        )
+                    )

         # dependencies
         if spec.concrete:
@@ -2076,7 +2072,7 @@ def define_ad_hoc_versions_from_specs(
             # best possible, so they're guaranteed to be used preferentially.
             version = s.versions.concrete

-            if version is None or any(v == version for v in self.possible_versions[s.name]):
+            if version is None or (any((v == version) for v in self.possible_versions[s.name])):
                 continue

             if require_checksum and not _is_checksummed_git_version(version):
@@ -2390,9 +2386,16 @@ def concrete_specs(self):
             # - Add OS to possible OS's
             for dep in spec.traverse():
                 self.possible_versions[dep.name].add(dep.version)
-                self.declared_versions[dep.name].append(
-                    DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
-                )
+                if isinstance(dep.version, vn.GitVersion):
+                    self.declared_versions[dep.name].append(
+                        DeclaredVersion(
+                            version=dep.version, idx=0, origin=Provenance.INSTALLED_GIT_VERSION
+                        )
+                    )
+                else:
+                    self.declared_versions[dep.name].append(
+                        DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
+                    )
                 self.possible_oses.add(dep.os)

     def define_concrete_input_specs(self, specs, possible):
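For reference, the isinstance dispatch above can be exercised directly. This assumes a Spack checkout on sys.path; the git-ref spelling below is illustrative, and accepted forms vary across Spack versions:

import spack.version as vn

standard = vn.Version("1.2.3")
git_ref = vn.Version("git.main=main")  # a git branch pinned to a version

print(isinstance(standard, vn.GitVersion))  # False
print(isinstance(git_ref, vn.GitVersion))   # True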
@@ -2444,7 +2447,7 @@ def setup(

         if using_libc_compatibility():
             for libc in self.libcs:
-                self.gen.fact(fn.allowed_libc(libc.name, libc.version))
+                self.gen.fact(fn.host_libc(libc.name, libc.version))

         if not allow_deprecated:
             self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2741,8 +2744,6 @@ class _Head:
     node_compiler = fn.attr("node_compiler_set")
     node_compiler_version = fn.attr("node_compiler_version_set")
     node_flag = fn.attr("node_flag_set")
-    node_flag_source = fn.attr("node_flag_source")
-    node_flag_propagate = fn.attr("node_flag_propagate")
     propagate = fn.attr("propagate")
@@ -2758,8 +2759,6 @@ class _Body:
     node_compiler = fn.attr("node_compiler")
     node_compiler_version = fn.attr("node_compiler_version")
     node_flag = fn.attr("node_flag")
-    node_flag_source = fn.attr("node_flag_source")
-    node_flag_propagate = fn.attr("node_flag_propagate")
     propagate = fn.attr("propagate")
@@ -3346,6 +3345,8 @@ def hash(self, node, h):
     def node(self, node):
         if node not in self._specs:
             self._specs[node] = spack.spec.Spec(node.pkg)
+            for flag_type in spack.spec.FlagMap.valid_compiler_flags():
+                self._specs[node].compiler_flags[flag_type] = []

     def _arch(self, node):
         arch = self._specs[node].architecture
@@ -3398,9 +3399,6 @@ def node_flag(self, node, flag_type, flag):
     def node_flag_source(self, node, flag_type, source):
         self._flag_sources[(node, flag_type)].add(source)

-    def no_flags(self, node, flag_type):
-        self._specs[node].compiler_flags[flag_type] = []
-
     def external_spec_selected(self, node, idx):
         """This means that the external spec and index idx has been selected for this package."""
         packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
@@ -3493,7 +3491,7 @@ def reorder_flags(self):
                 ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
                 compiler_flags = spec.compiler_flags.get(flag_type, [])

-                msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
+                msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
                 assert set(compiler_flags) == set(ordered_compiler_flags), msg

                 spec.compiler_flags.update({flag_type: ordered_compiler_flags})
@@ -3563,9 +3561,8 @@ def build_specs(self, function_tuples):
             # do not bother calling actions on it except for node_flag_source,
             # since node_flag_source is tracking information not in the spec itself
             spec = self._specs.get(args[0])
-            if spec and spec.concrete:
-                if name != "node_flag_source":
-                    continue
+            if spec and spec.concrete and name != "node_flag_source":
+                continue

             action(*args)
@@ -3836,12 +3833,6 @@ class Solver:
     def __init__(self):
         self.driver = PyclingoDriver()
         self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)
-        if spack.platforms.host().name == "cray":
-            msg = (
-                "The Cray platform, i.e. 'platform=cray', will be removed in Spack v0.23. "
-                "All Cray machines will be then detected as 'platform=linux'."
-            )
-            warnings.warn(msg)

     @staticmethod
     def _check_input_and_extract_concrete_specs(specs):
@@ -29,7 +29,6 @@
 :- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
 :- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
 :- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
-:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
 :- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
 :- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
 :- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).
@@ -256,6 +255,7 @@ possible_version_weight(node(ID, Package), Weight)
 :- attr("version", node(ID, Package), Version),
    version_weight(node(ID, Package), Weight),
    not pkg_fact(Package, version_declared(Version, Weight, "installed")),
+   not pkg_fact(Package, version_declared(Version, Weight, "installed_git_version")),
    not build(node(ID, Package)),
    internal_error("Build version weight used for reused package").
@@ -964,12 +964,19 @@ pkg_fact(Package, variant_single_value("dev_path"))

 % Propagation roots have a corresponding attr("propagate", ...)
 propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
+propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).

 % Propagate an attribute along edges to child nodes
 propagate(ChildNode, PropagatedAttribute) :-
     propagate(ParentNode, PropagatedAttribute),
     depends_on(ParentNode, ChildNode).

+propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
+    propagate(ParentNode, PropagatedAttribute, edge_types(DepType1, DepType2)),
+    depends_on(ParentNode, ChildNode),
+    1 { attr("depends_on", ParentNode, ChildNode, DepType1); attr("depends_on", ParentNode, ChildNode, DepType2) }.
+
 %-----------------------------------------------------------------------------
 % Activation of propagated values
 %-----------------------------------------------------------------------------
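The new edge-typed rule can be poked at in isolation with the clingo Python bindings (assumes the `clingo` package is installed; the three-node graph and the `flag` atom are made up). Node b receives the attribute over a link edge, while c, reachable only over a build edge, does not:

import clingo

PROGRAM = """
attr("propagate", a, flag, edge_types("link", "run")).
depends_on(a, b). attr("depends_on", a, b, "link").
depends_on(b, c). attr("depends_on", b, c, "build").

propagate(Root, Attr, ET) :- attr("propagate", Root, Attr, ET).
propagate(Child, Attr, edge_types(T1, T2)) :-
    propagate(Parent, Attr, edge_types(T1, T2)),
    depends_on(Parent, Child),
    1 { attr("depends_on", Parent, Child, T1); attr("depends_on", Parent, Child, T2) }.

#show propagate/3.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
ctl.solve(on_model=print)  # propagate(a,...) and propagate(b,...); nothing for c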
@@ -995,6 +1002,33 @@ variant_is_propagated(PackageNode, Variant) :-
     attr("variant_value", PackageNode, Variant, Value),
     not propagate(PackageNode, variant_value(Variant, Value)).

+%----
+% Flags
+%----
+
+% A propagated flag implies:
+% 1. The same flag type is not set on this node
+% 2. This node has the same compiler as the propagation source
+
+propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
+    propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
+    not attr("node_flag_set", node(PackageID, Package), FlagType, _),
+    % Same compiler as propagation source
+    node_compiler(node(PackageID, Package), CompilerID),
+    node_compiler(SourceNode, CompilerID),
+    attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
+    node(PackageID, Package) != SourceNode,
+    not runtime(Package).
+
+attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
+attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).
+
+% Cannot propagate the same flag from two distinct sources
+error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
+    propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
+    propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
+    Source1 < Source2.
+
 %----
 % Compiler constraints
 %----
@@ -1128,11 +1162,8 @@ target_weight(Target, 0)
 node_target_weight(PackageNode, MinWeight)
   :- attr("node", PackageNode),
      attr("node_target", PackageNode, Target),
-     target(Target),
      MinWeight = #min { Weight : target_weight(Target, Weight) }.

-:- attr("node_target", PackageNode, Target), not node_target_weight(PackageNode, _).
-
 % compatibility rules for targets among nodes
 node_target_match(ParentNode, DependencyNode)
   :- attr("depends_on", ParentNode, DependencyNode, Type), Type != "build",
@@ -1279,45 +1310,9 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
 % Compiler flags
 %-----------------------------------------------------------------------------

-% propagate flags when compiler match
-can_inherit_flags(PackageNode, DependencyNode, FlagType)
-  :- same_compiler(PackageNode, DependencyNode),
-     not attr("node_flag_set", DependencyNode, FlagType, _),
-     flag_type(FlagType).
-
-same_compiler(PackageNode, DependencyNode)
-  :- depends_on(PackageNode, DependencyNode),
-     node_compiler(PackageNode, CompilerID),
-     node_compiler(DependencyNode, CompilerID),
-     compiler_id(CompilerID).
-
-node_flag_inherited(DependencyNode, FlagType, Flag)
-  :- attr("node_flag_set", PackageNode, FlagType, Flag),
-     can_inherit_flags(PackageNode, DependencyNode, FlagType),
-     attr("node_flag_propagate", PackageNode, FlagType).
-
-% Ensure propagation
-:- node_flag_inherited(PackageNode, FlagType, Flag),
-   can_inherit_flags(PackageNode, DependencyNode, FlagType),
-   attr("node_flag_propagate", PackageNode, FlagType).
-
-error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
-  depends_on(Source1, Package),
-  depends_on(Source2, Package),
-  attr("node_flag_propagate", Source1, FlagType),
-  attr("node_flag_propagate", Source2, FlagType),
-  can_inherit_flags(Source1, Package, FlagType),
-  can_inherit_flags(Source2, Package, FlagType),
-  Source1 < Source2.
-
 % remember where flags came from
-attr("node_flag_source", PackageNode, FlagType, PackageNode)
-  :- attr("node_flag_set", PackageNode, FlagType, _).
-
-attr("node_flag_source", DependencyNode, FlagType, Q)
-  :- attr("node_flag_source", PackageNode, FlagType, Q),
-     node_flag_inherited(DependencyNode, FlagType, _),
-     attr("node_flag_propagate", PackageNode, FlagType).
+attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
+attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).

 % compiler flags from compilers.yaml are put on nodes if compiler matches
 attr("node_flag", PackageNode, FlagType, Flag)
@@ -1337,15 +1332,8 @@ attr("node_flag_compiler_default", PackageNode)
   compiler_name(CompilerID, CompilerName),
   compiler_version(CompilerID, Version).

-% if a flag is set to something or inherited, it's included
+% Flag set to something
 attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
-attr("node_flag", PackageNode, FlagType, Flag) :- node_flag_inherited(PackageNode, FlagType, Flag).
-
-% if no node flags are set for a type, there are no flags.
-attr("no_flags", PackageNode, FlagType)
-  :- not attr("node_flag", PackageNode, FlagType, _),
-     attr("node", PackageNode),
-     flag_type(FlagType).

 #defined compiler_flag/3.
Some files were not shown because too many files have changed in this diff.