Compare commits: v0.22.2...containers

704 commits
[Per-commit table omitted: the 704 commits between v0.22.2 and containers (bdcd817233 through 05c1e7ecc2) rendered only as avatar placeholders and bare SHA1 values; the author, date, and message columns did not survive extraction.]
Files changed:

.github/workflows/audit.yaml (vendored, 4 changed lines)

```diff
@@ -28,7 +28,7 @@ jobs:
       run:
       shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        spack -d audit externals
        ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
```
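A pattern worth noting before the remaining workflow diffs: most of their changes are the same kind of bump, replacing the pinned commit SHA of a third-party action. A minimal sketch of the pinning convention (the inline comments are illustrative, not taken from this diff):

```yaml
# Sketch: pin third-party actions to a full commit SHA (immutable)
# rather than a tag like @v4 (mutable). Version updates then show up
# as SHA bumps in review, exactly as in the diffs on this page.
steps:
  - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29  # record the release in a comment for readers
    with:
      fetch-depth: 0  # full history rather than a shallow clone
```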
.github/workflows/bootstrap.yml (vendored, 28 changed lines)

```diff
@@ -37,7 +37,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap clingo
@@ -53,27 +53,31 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
        run: |
          brew install cmake bison tree
      - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
+        env:
+          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
        run: |
-          source share/spack/setup-env.sh
+          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
          spack external find --not-buildable cmake bison
          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
+          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -84,15 +88,13 @@ jobs:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
+          brew install tree gawk
+          sudo rm -rf $(command -v gpg gpg2)
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
      - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
@@ -121,7 +123,7 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
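The ``env`` block added above selects per-runner values with the ``cond && a || b`` idiom of GitHub Actions expressions. A minimal self-contained sketch of the idiom (job and step names here are illustrative):

```yaml
# Sketch: OS-dependent values via the expression "ternary" idiom.
# `cond && A || B` yields A when cond is true, else B; beware that it
# falls through to B whenever A itself is falsy (e.g. an empty string).
jobs:
  demo:
    strategy:
      matrix:
        runner: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.runner }}
    steps:
      - name: Pick the setup script for this OS
        env:
          SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
        run: echo "would run share/spack/setup-env.${{ env.SCRIPT_EXT }}"
```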
.github/workflows/build-containers.yml (vendored, 11 changed lines)

```diff
@@ -40,8 +40,7 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
-                     [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
+                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
                      [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                      [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                      [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -56,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
@@ -94,10 +93,10 @@ jobs:
          path: dockerfiles

      - name: Set up QEMU
-        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
+        uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
+        uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -114,7 +113,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
+        uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
```
.github/workflows/ci.yaml (vendored, 9 changed lines)

```diff
@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
@@ -53,6 +53,13 @@ jobs:
             - 'var/spack/repos/builtin/packages/clingo/**'
             - 'var/spack/repos/builtin/packages/python/**'
             - 'var/spack/repos/builtin/packages/re2c/**'
+            - 'var/spack/repos/builtin/packages/gnupg/**'
+            - 'var/spack/repos/builtin/packages/libassuan/**'
+            - 'var/spack/repos/builtin/packages/libgcrypt/**'
+            - 'var/spack/repos/builtin/packages/libgpg-error/**'
+            - 'var/spack/repos/builtin/packages/libksba/**'
+            - 'var/spack/repos/builtin/packages/npth/**'
+            - 'var/spack/repos/builtin/packages/pinentry/**'
             - 'lib/spack/**'
             - 'share/spack/**'
             - '.github/workflows/bootstrap.yml'
```
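The ``core``/``packages`` outputs and the glob list extended above drive change detection: downstream jobs run only when matching paths changed. A hedged sketch of the mechanism, assuming the dorny/paths-filter action (the ``bootstrap`` filter name and the trailing job are illustrative, not taken from this diff):

```yaml
# Sketch: run expensive jobs only when relevant paths changed.
jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      bootstrap: ${{ steps.filter.outputs.bootstrap }}
    steps:
      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: |
            bootstrap:
              - 'var/spack/repos/builtin/packages/gnupg/**'
              - '.github/workflows/bootstrap.yml'

  bootstrap-tests:
    needs: changes
    if: needs.changes.outputs.bootstrap == 'true'  # filter outputs are the strings 'true'/'false'
    runs-on: ubuntu-latest
    steps:
      - run: echo "only runs when the gnupg/bootstrap paths changed"
```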
.github/workflows/nightly-win-builds.yml (vendored, 2 changed lines)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
.github/workflows/style/requirements.txt (vendored, 4 changed lines)

```diff
@@ -1,7 +1,7 @@
 black==24.4.2
 clingo==5.7.1
-flake8==7.0.0
+flake8==7.1.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.9
+types-six==1.16.21.20240513
 vermin==1.6.0
```
.github/workflows/unit_tests.yaml (vendored, 20 changed lines)

```diff
@@ -51,7 +51,7 @@ jobs:
           on_develop: false

     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -160,7 +160,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -198,7 +198,7 @@ jobs:
        os: [macos-13, macos-14]
        python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -254,7 +254,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
```
.github/workflows/valid-style.yml (vendored, 6 changed lines)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
```
CHANGELOG.md (65 changed lines)

```diff
@@ -1,65 +1,3 @@
-# v0.22.2 (2024-09-21)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
-- Support macOS Sequoia (#45018, #45127)
-- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
-- Ensure shell escaping of environment variable values in load and activate commands (#42780)
-- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
-- Do not halt concretization on unknown variants in externals (#45326)
-- Improve validation of `develop` config section (#46485)
-- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
-- Improve backwards compatibility in `include_concrete` (#45766)
-- Fix issue where package tags were sometimes repeated (#45160)
-- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
-- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
-- Remove debug statements in package hash computation (#45235)
-- Remove redundant clingo warnings (#45269)
-- Remove hard-coded layout version (#45645)
-- Do not initialize previous store state in `use_store` (#45268)
-- Docs improvements (#46475)
-
-## Package updates
-- `chapel` major update (#42197, #44931, #45304)
-
-# v0.22.1 (2024-07-04)
-
-## Bugfixes
-- Fix reuse of externals on Linux (#44316)
-- Ensure parent gcc-runtime version >= child (#44834, #44870)
-- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
-- Improve version detection of glibc (#44154)
-- Improve heuristics for solver (#44893, #44976, #45023)
-- Make strong preferences override reuse (#44373)
-- Reduce verbosity when C compiler is missing (#44182)
-- Make missing ccache executable an error when required (#44740)
-- Make every environment view containing `python` a `venv` (#44382)
-- Fix external detection for compilers with os but no target (#44156)
-- Fix version optimization for roots (#44272)
-- Handle common implementations of pagination of tags in OCI build caches (#43136)
-- Apply fetched patches to develop specs (#44950)
-- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
-- Fix issue with long filenames in build caches on Windows (#43851)
-- Fix formatting issue in `spack audit` (#45045)
-- CI fixes (#44582, #43965, #43967, #44279, #44213)
-
-## Package updates
-- protobuf: fix 3.4:3.21 patch checksum (#44443)
-- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
-- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
-- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
-- Remove mesa18 and libosmesa (#44264)
-- Enforce consistency of `gl` providers (#44307)
-- Require libiconv for iconv (#44335, #45026).
-  Notice that glibc/musl also provide iconv, but are not guaranteed to be
-  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
-- py-matplotlib: qualify when to do a post install (#44191)
-- rust: fix v1.78.0 instructions (#44127)
-- suite-sparse: improve setting of the `libs` property (#44214)
-- netlib-lapack: provide blas and lapack together (#44981)
-
-
 # v0.22.0 (2024-05-12)

@@ -381,7 +319,6 @@
 * 344 committers to packages
 * 45 committers to core

-
 # v0.21.2 (2024-03-01)

 ## Bugfixes
@@ -411,7 +348,7 @@
 - spack graph: fix coloring with environments (#41240)
 - spack info: sort variants in --variants-by-name (#41389)
 - Spec.format: error on old style format strings (#41934)
-- ASP-based solver:
+- ASP-based solver:
   - fix infinite recursion when computing concretization errors (#41061)
   - don't error for type mismatch on preferences (#41138)
   - don't emit spurious debug output (#41218)
```
(File headers for the remaining diffs did not survive extraction; the hunks follow in page order.)

```diff
@@ -32,7 +32,7 @@

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, and many supercomputers. Spack is non-destructive: installing a
+macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
```

```diff
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec /usr/bin/env spack python "$@"
+exec spack python "$@"
```

```diff
@@ -188,25 +188,27 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-:: If args contain --sh, --csh, or -h/--help: just execute.
-if defined _sp_args (
-  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-    goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-    goto :default_case
-  )
+if NOT defined _sp_args (
+  exit /B 0
 )
+
+:: If args contain --bat, or -h/--help: just execute.
+if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+  goto :default_case
+) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
+  goto :default_case
+)

 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
+) do %%I

 goto :end_switch

 :case_unload
 goto :case_load

 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
```

```diff
@@ -1,16 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default configuration for Spack's module file generation.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/modules.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/modules.yaml
-# -------------------------------------------------------------------------
-modules: {}
```
```diff
@@ -1433,22 +1433,12 @@ the reserved keywords ``platform``, ``os`` and ``target``:
    $ spack install libelf os=ubuntu18.04
    $ spack install libelf target=broadwell

-or together by using the reserved keyword ``arch``:
-
-.. code-block:: console
-
-   $ spack install libelf arch=cray-CNL10-haswell
-
 Normally users don't have to bother specifying the architecture if they
 are installing software for their current host, as in that case the
 values will be detected automatically. If you need fine-grained control
 over which packages use which targets (or over *all* packages' default
 target), see :ref:`package-preferences`.

-.. admonition:: Cray machines
-
-   The situation is a little bit different for Cray machines and a detailed
-   explanation on how the architecture can be set on them can be found at :ref:`cray-support`
-
 .. _support-for-microarchitectures:
```
```diff
@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
        def autoreconf(self, spec, prefix):
            which("bash")("autogen.sh")

+If the ``package.py`` has build instructions in a separate
+:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+
+.. code-block:: python
+
+   class AutotoolsBuilder(AutotoolsBuilder):
+       def autoreconf(self, pkg, spec, prefix):
+           which("bash")("autogen.sh")
+
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
```
```diff
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::

    spack list -d oneAPI
```
```diff
@@ -5,13 +5,14 @@

 .. chain:

-=============================================
-Chaining Spack Installations (upstreams.yaml)
-=============================================
+============================
+Chaining Spack Installations
+============================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml``:
+you can add it as an entry to ``upstreams.yaml`` at any of the
+:ref:`configuration-scopes`:

 .. code-block:: yaml

@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:
    install_tree: /path/to/another/spack/opt/spack

 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory.
+Spack base directory, or the location of the ``install_tree`` defined
+in :ref:`config.yaml <config-yaml>`.

 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
```
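For context on the ``upstreams.yaml`` fragment referenced in the hunk above, a minimal sketch of a complete file (the instance name ``site-spack`` and the path are illustrative):

```yaml
# Sketch: upstreams.yaml in any configuration scope,
# e.g. ~/.spack/upstreams.yaml or $SPACK_ROOT/etc/spack/upstreams.yaml.
upstreams:
  site-spack:                                      # arbitrary label for the upstream
    install_tree: /path/to/another/spack/opt/spack # its opt/spack (or configured install_tree)
```

With this in place, ``spack find`` also reports packages installed in the upstream instance, as the surrounding documentation describes.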
```diff
@@ -203,12 +203,9 @@ The OS that are currently supported are summarized in the table below:
    * - Ubuntu 24.04
      - ``ubuntu:24.04``
      - ``spack/ubuntu-noble``
-   * - CentOS 7
-     - ``centos:7``
-     - ``spack/centos7``
-   * - CentOS Stream
-     - ``quay.io/centos/centos:stream``
-     - ``spack/centos-stream``
+   * - CentOS Stream9
+     - ``quay.io/centos/centos:stream9``
+     - ``spack/centos-stream9``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
@@ -273,7 +270,7 @@ under the ``container`` attribute of environments:
     # Sets the base images for the stages where Spack builds the
     # software or where the software gets installed after being built..
     images:
-      os: "centos:7"
+      os: "almalinux:9"
       spack: develop

     # Whether or not to strip binaries
@@ -324,32 +321,33 @@ following ``spack.yaml``:

    container:
      images:
-       os: centos:7
-       spack: 0.15.4
+       os: almalinux:9
+       spack: 0.22.0

-uses ``spack/centos7:0.15.4`` and ``centos:7`` for the stages where the
+uses ``spack/almalinux9:0.22.0`` and ``almalinux:9`` for the stages where the
 software is respectively built and installed:

 .. code-block:: docker

    # Build stage with Spack pre-installed and ready to be used
-   FROM spack/centos7:0.15.4 as builder
+   FROM spack/almalinux9:0.22.0 AS builder

    # What we want to install and how we want to install it
    # is specified in a manifest file (spack.yaml)
-   RUN mkdir /opt/spack-environment \
-   &&  (echo "spack:" \
-   &&   echo "  specs:" \
-   &&   echo "  - gromacs+mpi" \
-   &&   echo "  - mpich" \
-   &&   echo "  concretizer:" \
-   &&   echo "    unify: true" \
-   &&   echo "  config:" \
-   &&   echo "    install_tree: /opt/software" \
-   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
+   RUN mkdir -p /opt/spack-environment && \
+   set -o noclobber \
+   &&  (echo spack: \
+   &&   echo '  specs:' \
+   &&   echo '  - gromacs+mpi' \
+   &&   echo '  - mpich' \
+   &&   echo '  concretizer:' \
+   &&   echo '    unify: true' \
+   &&   echo '  config:' \
+   &&   echo '    install_tree: /opt/software' \
+   &&   echo '  view: /opt/views/view') > /opt/spack-environment/spack.yaml
    [ ... ]
    # Bare OS image to run the installed executables
-   FROM centos:7
+   FROM quay.io/almalinuxorg/almalinux:9

    COPY --from=builder /opt/spack-environment /opt/spack-environment
    COPY --from=builder /opt/software /opt/software
```
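To see where the ``images:`` keys changed above sit in practice, here is a hedged sketch of a complete environment manifest using the ``container`` attribute (the spec choices mirror the generated Dockerfile in the hunk; the other keys are common defaults, not taken from this diff):

```yaml
# Sketch: spack.yaml for `spack containerize`, using the new base image.
spack:
  specs:
    - gromacs+mpi
    - mpich
  container:
    format: docker          # generate a Dockerfile
    images:
      os: "almalinux:9"     # base image for the final (run) stage
      spack: develop        # Spack image tag for the build stage
    strip: true             # strip binaries to shrink the final image
```

Running ``spack containerize > Dockerfile`` inside the environment directory would emit a multi-stage Dockerfile like the one shown in the hunk.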
```diff
@@ -1364,187 +1364,6 @@ This will write the private key to the file `dinosaur.priv`.
 or for help on an issue or the Spack slack.

-.. _cray-support:
-
--------------
-Spack on Cray
--------------
-
-Spack differs slightly when used on a Cray system. The architecture spec
-can differentiate between the front-end and back-end processor and operating system.
-For example, on Edison at NERSC, the back-end target processor
-is "Ivy Bridge", so you can specify to use the back-end this way:
-
-.. code-block:: console
-
-   $ spack install zlib target=ivybridge
-
-You can also use the operating system to build against the back-end:
-
-.. code-block:: console
-
-   $ spack install zlib os=CNL10
-
-Notice that the name includes both the operating system name and the major
-version number concatenated together.
-
-Alternatively, if you want to build something for the front-end,
-you can specify the front-end target processor. The processor for a login node
-on Edison is "Sandy bridge" so we specify on the command line like so:
-
-.. code-block:: console
-
-   $ spack install zlib target=sandybridge
-
-And the front-end operating system is:
-
-.. code-block:: console
-
-   $ spack install zlib os=SuSE11
-
-^^^^^^^^^^^^^^^^^^^^^^^
-Cray compiler detection
-^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can detect compilers using two methods. For the front-end, we treat
-everything the same. The difference lies in back-end compiler detection.
-Back-end compiler detection is made via the Tcl module avail command.
-Once it detects the compiler it writes the appropriate PrgEnv and compiler
-module name to compilers.yaml and sets the paths to each compiler with Cray\'s
-compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
-the correct PrgEnv and compiler module and will call appropriate wrapper.
-
-The compilers.yaml config file will also differ. There is a
-modules section that is filled with the compiler's Programming Environment
-and module name. On other systems, this field is empty []:
-
-.. code-block:: yaml
-
-   - compiler:
-       modules:
-         - PrgEnv-intel
-         - intel/15.0.109
-
-As mentioned earlier, the compiler paths will look different on a Cray system.
-Since most compilers are invoked using cc, CC and ftn, the paths for each
-compiler are replaced with their respective Cray compiler wrapper names:
-
-.. code-block:: yaml
-
-   paths:
-     cc: cc
-     cxx: CC
-     f77: ftn
-     fc: ftn
-
-As opposed to an explicit path to the compiler executable. This allows Spack
-to call the Cray compiler wrappers during build time.
-
-For more on compiler configuration, check out :ref:`compiler-config`.
-
-Spack sets the default Cray link type to dynamic, to better match other
-other platforms. Individual packages can enable static linking (which is the
-default outside of Spack on cray systems) using the ``-static`` flag.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting defaults and using Cray modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you want to use default compilers for each PrgEnv and also be able
-to load cray external modules, you will need to set up a ``packages.yaml``.
-
-Here's an example of an external configuration for cray modules:
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
-         modules:
-         - cray-mpich
-     all:
-       providers:
-         mpi: [mpich]
-
-This tells Spack that for whatever package that depends on mpi, load the
-cray-mpich module into the environment. You can then be able to use whatever
-environment variables, libraries, etc, that are brought into the environment
-via module load.
-
-.. note::
-
-   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
-   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
-   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
-   compiler wrapper link line).
-
-You can set the default compiler that Spack can use for each compiler type.
-If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
-In the compiler field, set the compiler specs in your order of preference.
-Whenever you build with that compiler type, Spack will concretize to that version.
-
-Here is an example of a full packages.yaml used at NERSC
-
-.. code-block:: yaml
-
-   packages:
-     mpich:
-       externals:
-       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-mpich
-       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
-         modules:
-         - cray-mpich
-       buildable: False
-     netcdf:
-       externals:
-       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-netcdf
-       buildable: False
-     hdf5:
-       externals:
-       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
-         modules:
-         - cray-hdf5
-       buildable: False
-     all:
-       compiler: [gcc@5.2.0, intel@16.0.0.109]
-       providers:
-         mpi: [mpich]
-
-Here we tell spack that whenever we want to build with gcc use version 5.2.0 or
-if we want to build with intel compilers, use version 16.0.0.109. We add a spec
-for each compiler type for each cray modules. This ensures that for each
-compiler on our system we can use that external module.
-
-For more on external packages check out the section :ref:`sec-external-packages`.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Using Linux containers on Cray machines
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack uses environment variables particular to the Cray programming
-environment to determine which systems are Cray platforms. These
-environment variables may be propagated into containers that are not
-using the Cray programming environment.
-
-To ensure that Spack does not autodetect the Cray programming
-environment, unset the environment variable ``MODULEPATH``. This
-will cause Spack to treat a linux container on a Cray system as a base
-linux distro.

 .. _windows_support:

 ----------------
```
```diff
@@ -2344,6 +2344,27 @@ you set ``parallel`` to ``False`` at the package level, then each call
 to ``make()`` will be sequential by default, but packagers can call
 ``make(parallel=True)`` to override it.

+Note that the ``--jobs`` option works out of the box for all standard
+build systems. If you are using a non-standard build system instead, you
+can use the variable ``make_jobs`` to extract the number of jobs specified
+by the ``--jobs`` option:
+
+.. code-block:: python
+   :emphasize-lines: 7, 11
+   :linenos:
+
+   class Xios(Package):
+      ...
+      def install(self, spec, prefix):
+         ...
+         options = [
+            ...
+            '--jobs', str(make_jobs),
+         ]
+         ...
+         make_xios = Executable("./make_xios")
+         make_xios(*options)
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Install-level build parallelism
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -5173,12 +5194,6 @@ installed executable. The check is implemented as follows:
            reframe = Executable(self.prefix.bin.reframe)
            reframe("-l")

-.. warning::
-
-   The API for adding tests is not yet considered stable and may change
-   in future releases.
-
 """"""""""""""""""""""""""""""""
 Checking build-time test results
 """"""""""""""""""""""""""""""""
```
||||
@@ -5216,38 +5231,42 @@ be left in the build stage directory as illustrated below:
|
||||
Stand-alone tests
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
While build-time tests are integrated with the build process, stand-alone
|
||||
While build-time tests are integrated with the installation process, stand-alone
|
||||
tests are expected to run days, weeks, even months after the software is
|
||||
installed. The goal is to provide a mechanism for gaining confidence that
|
||||
packages work as installed **and** *continue* to work as the underlying
|
||||
software evolves. Packages can add and inherit stand-alone tests. The
|
||||
`spack test`` command is used to manage stand-alone testing.
|
||||
``spack test`` command is used for stand-alone testing.
|
||||
|
||||
.. note::
|
||||
.. admonition:: Stand-alone test methods should complete within a few minutes.
|
||||
|
||||
Execution speed is important since these tests are intended to quickly
|
||||
assess whether installed specs work on the system. Consequently, they
|
||||
should run relatively quickly -- as in on the order of at most a few
|
||||
minutes -- while ideally executing all, or at least key aspects of the
|
||||
installed software.
|
||||
assess whether installed specs work on the system. Spack cannot spare
|
||||
resources for more extensive testing of packages included in CI stacks.
|
||||
|
||||
.. note::
|
||||
|
||||
Failing stand-alone tests indicate problems with the installation and,
|
||||
therefore, there is no reason to proceed with more resource-intensive
|
||||
tests until those have been investigated.
|
||||
|
||||
Passing stand-alone tests indicate that more thorough testing, such
|
||||
as running extensive unit or regression tests, or tests that run at
|
||||
scale can proceed without wasting resources on a problematic installation.
|
||||
Consequently, stand-alone tests should run relatively quickly -- as in
|
||||
on the order of at most a few minutes -- while testing at least key aspects
|
||||
of the installed software. Save more extensive testing for other tools.
|
||||
|
||||
Tests are defined in the package using methods with names beginning ``test_``.
|
||||
This allows Spack to support multiple independent checks, or parts. Files
|
||||
needed for testing, such as source, data, and expected outputs, may be saved
|
||||
from the build and or stored with the package in the repository. Regardless
|
||||
of origin, these files are automatically copied to the spec's test stage
|
||||
directory prior to execution of the test method(s). Spack also provides some
|
||||
helper functions to facilitate processing.
|
||||
directory prior to execution of the test method(s). Spack also provides helper
|
||||
functions to facilitate common processing.
|
||||
|
||||
.. tip::
|
||||
|
||||
**The status of stand-alone tests can be used to guide follow-up testing efforts.**
|
||||
|
||||
Passing stand-alone tests justify performing more thorough testing, such
|
||||
as running extensive unit or regression tests or tests that run at scale,
|
||||
when available. These tests are outside of the scope of Spack packaging.
|
||||
|
||||
Failing stand-alone tests indicate problems with the installation and,
|
||||
therefore, no reason to proceed with more resource-intensive tests until
|
||||
the failures have been investigated.
|
||||
|
||||
.. _configure-test-stage:
|
||||
|
||||
@@ -5255,30 +5274,26 @@ helper functions to facilitate processing.
|
||||
Configuring the test stage directory
""""""""""""""""""""""""""""""""""""

Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:

.. code-block:: yaml

   config:
     test_stage: /path/to/test/stage

Packages can use the ``self.test_suite.stage`` property to access the path.
.. admonition:: Each spec being tested has its own test stage directory.

   The ``config:test_stage`` option is the path to the root of a
   **test suite**'s stage directories; each spec being tested by the
   ``spack test run`` command gets its own stage subdirectory under
   that root.

Other package properties that provide paths to spec-specific subdirectories
and files are described in :ref:`accessing-files`.
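
As a quick, hedged sketch of how a package might use these paths from within
a test method (the method name and the assertion are illustrative only):

.. code-block:: python

   def test_data_location(self):
       """confirm staged files for this spec live under the suite root"""
       # Root stage directory shared by every spec in the test suite.
       suite_root = self.test_suite.stage

       # Stage subdirectory specific to the spec under test.
       spec_dir = self.test_suite.test_dir_for_spec(self.spec)
       assert str(spec_dir).startswith(str(suite_root))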

.. _adding-standalone-tests:

Adding stand-alone tests
""""""""""""""""""""""""

Test recipes are defined in the package using methods with names beginning
``test_``. Each method has access to the information Spack tracks on the
package, such as options, compilers, and dependencies, supporting the
customization of tests to the build. Standard Python ``assert`` statements
and other error reporting mechanisms can be used. These exceptions are
automatically caught and reported as test failures.

Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at
the start of test execution, so it is also important that the
docstring/purpose be brief.

.. tip::

   We recommend naming test methods so it is clear *what* is being tested.
   For example, if a test method is building and/or running an executable
   called ``example``, then call the method ``test_example``. This, together
   with a similarly meaningful test purpose, will aid test comprehension,
   debugging, and maintainability.

Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes, as sketched below. Examples of checking
:ref:`variant settings <variants>` and :ref:`spec constraints <testing-specs>`
can be found at the provided links.
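
For instance, a test method can branch on the spec just as a build recipe
does. The following is a minimal sketch; the ``+mpi`` variant, the ``mpi``
dependency, and the ``example`` program are hypothetical:

.. code-block:: python

   def test_mpi_example(self):
       """run example under MPI when the spec was built with +mpi"""
       if "+mpi" not in self.spec:
           raise SkipTest("Test requires the +mpi variant")

       # Dependencies are reachable through the spec, as in build recipes.
       mpirun = which(self.spec["mpi"].prefix.bin.mpirun)
       mpirun("-n", "2", self.prefix.bin.example)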

.. admonition:: Spack automatically sets up the test stage directory and environment.

   Spack automatically creates the test stage directory and copies
   relevant files *prior to* running tests. It can also ensure build
   dependencies are available **if** necessary.

   The path to the test stage is configurable (see :ref:`configure-test-stage`).

   Files that Spack knows to copy are those saved from the build (see
   :ref:`cache_extra_test_sources`) and those added to the package repository
   (see :ref:`cache_custom_files`).

   Spack will use the value of the ``test_requires_compiler`` property to
   determine whether it needs to also set up build dependencies (see
   :ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output, so it is only concerned with
confirming that the executable runs successfully. If the installed spec is
not expected to have ``example2``, then the check at the top of the method
will raise a special ``SkipTest`` exception, which is captured to facilitate
reporting skipped test parts to tools like CDash.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_example(self):
           """ensure installed example works"""
           expected = "Done."
           example = which(self.prefix.bin.example)

           # Capture stdout and stderr from running the Executable
           # and check that the expected output was produced.
           out = example(output=str.split, error=str.split)
           assert expected in out, f"Expected '{expected}' in the output"

       def test_example2(self):
           """run installed example2"""
           if self.spec.satisfies("@:1.0"):
               # Raise SkipTest to ensure flagging the test as skipped for
               # test reporting purposes.
               raise SkipTest("Test is only available for v1.1 on")

           example2 = which(self.prefix.bin.example2)
           example2()

Output showing the identification of each test part after running the tests
is illustrated below.

.. code-block:: console

   $ spack test run --alias mypackage mypackage@2.0
   ==> Spack test mypackage
   ...
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
   ...
   PASSED: MyPackage::test_example
   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
   ...
   PASSED: MyPackage::test_example2

.. admonition:: Do NOT implement tests that must run in the installation prefix.

   Use of the package spec's installation prefix for building and running
   tests is **strongly discouraged**. Doing so causes permission errors for
   shared Spack instances *and* facilities that install the software in
   read-only file systems or directories.

   Instead, start these test methods by explicitly copying the needed files
   from the installation prefix to the test stage directory. Note that the
   test stage directory is the current directory when the test is executed
   with the ``spack test run`` command.

.. admonition:: Test methods for library packages should build test executables.

   Stand-alone tests for library packages *should* build test executables
   that utilize the *installed* library. Doing so ensures the tests follow
   a build process similar to the one users of the library would follow.

   For more information on how to do this, see :ref:`test-build-tests`.

.. tip::

   If you want to see more examples from packages with stand-alone tests, run
   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _adding-standalone-test-parts:

"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""

Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.

Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed, even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail
status of the enclosing test is derived from the statuses of the embedded
test parts.

.. admonition:: Test method and test part names **must** be unique.

   Test results reporting requires that test methods and embedded test parts
   within a package have unique names.

The key arguments of ``test_part`` include:

* ``work_dir`` is the path to the directory in which the test will run.

  The default of ``None``, or ``"."``, corresponds to the spec's test
  stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. admonition:: Start test part names with the name of the enclosing test.

   We **highly recommend** starting the names of test parts with the name
   of the enclosing test. Doing so helps with the comprehension, readability,
   and debugging of test results.

Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order, since the outputs from one are inputs of others. Further
suppose we want to add an integration test that runs the executables in
order. We can accomplish this goal by implementing a stand-alone test method
consisting of test parts for each executable, as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """run setup, perform, and report"""

           with test_part(self, "test_series_setup", purpose="setup operation"):
               exe = which(self.prefix.bin.setup)
               exe()

           with test_part(self, "test_series_run", purpose="perform operation"):
               exe = which(self.prefix.bin.run)
               exe()

           with test_part(self, "test_series_report", purpose="generate report"):
               exe = which(self.prefix.bin.report)
               exe()

The result is that ``test_series`` runs the following executables in order:
``setup``, ``run``, and ``report``. In this case no options are passed to
any of the executables and no outputs from running them are checked.
Consequently, the implementation could be simplified with a for-loop as
follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""
           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   executable = which(self.prefix.bin.join(exe))
                   executable()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests, where
the second test part, ``test_series_run``, fails.

.. code-block:: console

   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   ...

Since the test parts depend on the success of previous parts, the failure of
one results in the failure of subsequent checks, and the overall result of
the test method, ``test_series``, is failure.

.. tip::

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

   We **highly recommend** re-using build-time test sources and pared-down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported
   versions of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   spec objects (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class, under other
   properties like the ``homepage``.

The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.

.. code-block:: python

   ...
   cxx_example = which(exe)
   cxx_example()
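
The elided portion of the example compiles ``cxx-example.cpp`` before the
two lines above run it. A hedged sketch of what that step might look like,
assuming the C++ source is compiled directly against the installed library
(the flags and layout are assumptions, not the documented implementation):

.. code-block:: python

   def test_cxx_example(self):
       """build and run cxx-example"""
       exe = "cxx-example"
       # CXX is available because test_requires_compiler is True.
       cxx = which(os.environ["CXX"])
       cxx(
           f"-L{self.prefix.lib}",
           f"-I{self.prefix.include}",
           f"{exe}.cpp",
           "-o", exe,
       )
       cxx_example = which(exe)
       cxx_example()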

Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""

You can use the ``cache_extra_test_sources`` helper routine to copy
directories and/or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.

The key argument of ``cache_extra_test_sources`` is ``srcs``, where:

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.
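
For example, a package could cache an entire subdirectory along with
individual files from another (this particular selection is illustrative):

.. code-block:: python

   @run_after("install")
   def copy_test_files(self):
       # Cache the whole tests directory plus two example sources.
       srcs = ["tests",
               join_path("examples", "foo.c"),
               join_path("examples", "bar.c")]
       cache_extra_test_sources(self, srcs)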

.. warning::

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method,
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).
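
A hedged sketch of such a one-time conversion, assuming the cached
``examples`` directory contains a ``Makefile`` with a hypothetical
``PREFIX`` variable that must point at the installed software:

.. code-block:: python

   @run_after("install")
   def copy_test_files(self):
       cache_extra_test_sources(self, "examples")

       # Rewrite the cached Makefile once, at install time, so the test
       # method does not have to patch it on every run. Locating the
       # cached copy via install_test_root is an assumption of this sketch.
       makefile = join_path(install_test_root(self), "examples", "Makefile")
       filter_file(r"^PREFIX\s*=.*", f"PREFIX = {self.prefix}", makefile)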

Below is a basic example of a test that relies on files from the
installation. This package method re-uses the contents of the ``examples``
subdirectory, which is assumed to have all of the files needed to allow
``make`` to compile and link ``foo.c`` and ``bar.c`` against the package's
installed library.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.

.. note::

   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``), when the
   installation prefix **and** files are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. note::

   While source and input files are generally recommended, binaries
   **may** also be cached by the build process. Only you, as the package
   writer or maintainer, know whether these files would be appropriate
   for testing the installed software weeks to months later.

.. tip::

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _cache_custom_files:

"""""""""""""""""""
Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:

- test source files
- test input files
- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is
not feasible, such as when the software is not being actively maintained.
When test files cannot be obtained from the repository, or there is a need
to supplement files that can, Spack supports the inclusion of additional
files under the ``test`` subdirectory of the package in the Spack repository.

Spack **automatically copies** the contents of that directory to the
test staging directory prior to running stand-alone tests. Test methods
access those files using the ``self.test_suite.current_test_data_dir``
property, as shown below.

The following example assumes a ``custom-example.cpp`` is saved in the
``MyLibrary`` package's ``test`` subdirectory. It also assumes the program
simply needs to be compiled and linked against the installed ``MyLibrary``
software.

.. code-block:: python

   class MyLibrary(Package):
       ...
       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               # Use the C++ compiler since custom-example is a .cpp source.
               cxx = which(os.environ["CXX"])
               cxx(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.

.. _expected_test_output_from_file:

"""""""""""""""""""""""""""""""""""
Reading expected output from a file
"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.

``get_escaped_text_output`` takes a single argument, ``filename``: the path
to the file containing the expected output.

The ``filename`` for one of the copied custom files (see
:ref:`cache_custom_files`) is rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:

.. code-block:: python

   ...
   for exp in expected:
       assert re.search(exp, out), f"Expected '{exp}' in output"
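
Filling in the elided portion, such a test might look roughly like the
sketch below. Only the two file names, the helper, and the final loop come
from the documentation; the ``example --dump`` invocation is hypothetical:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_data_dir = self.test_suite.current_test_data_dir
       db_filename = test_data_dir.join("packages.db")

       # Expected output, with any regex special characters escaped.
       expected = get_escaped_text_output(test_data_dir.join("dump.out"))

       # Run the installed executable against the copied database and
       # capture its output for comparison.
       example = which(self.prefix.bin.example)
       out = example("--dump", db_filename, output=str.split, error=str.split)

       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"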

If the files were instead cached from installing the software, the paths to
the two files would be found under the
``self.test_suite.current_test_cache_dir`` directory, as shown below:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...

.. _check_outputs:

""""""""""""""""""""""""""""""""""""
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""

The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.

Invoking the helper is the equivalent of:

.. code-block:: python

   ...
   if errors:
       raise RuntimeError("\n ".join(errors))

.. tip::

   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.
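
A hedged usage sketch, assuming the helper takes the expected strings
followed by the actual output (the argument order is an assumption of this
sketch, not confirmed by the surrounding text):

.. code-block:: python

   def test_example(self):
       """ensure example output contains the expected strings"""
       example = which(self.prefix.bin.example)
       actual = example(output=str.split, error=str.split)

       # Each expected value must appear somewhere in the actual output.
       check_outputs(["Initialized", "Done."], actual)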

.. _accessing-files:

"""""""""""""""""""""""""""""""""""""""
Finding package- and test-related files
"""""""""""""""""""""""""""""""""""""""

You may need to access files from one or more locations when writing
stand-alone tests. For example, the software's repository may not
include test source files, or it includes them but has no way to build the
executables using the installed headers and libraries. In these cases
you may need to reference the files relative to one or more root
directories. The table below lists the relevant path properties. See
:ref:`expected_test_output_from_file` for examples of accessing files saved
from the software repository, package repository, and installation.

- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``

.. _inheriting-tests:

""""""""""""""""""""""""""""
Inheriting stand-alone tests
""""""""""""""""""""""""""""

.. warning::

   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   goal and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.

This is already part of the boilerplate for packages created with
``spack create``.

.. _file-filtering:

^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^

can easily happen if it is not updated frequently, this behavior ensures that
Spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.

The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.

The ``--dependencies`` argument is also experimental and disables what in
GitLab is referred to as DAG scheduling, internally using the
``dependencies`` keyword rather than ``needs`` to list dependency jobs. The
drawback of using this option is that before any job can begin, all jobs in
previous stages must first complete. The benefit is that GitLab allows more
dependencies to be listed when using ``dependencies`` instead of ``needs``.

The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.

implemented using Python's built-in ``sys.path`` mechanism; the
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

.. warning::

   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.
lib/spack/docs/requirements.txt:

@@ -1,13 +1,13 @@
 sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.5.0
+sphinx_design==0.6.0
 sphinx-rtd-theme==2.0.0
 python-levenshtein==0.25.1
 docutils==0.20.1
-pygments==2.17.2
-urllib3==2.2.1
-pytest==8.2.0
+pygments==2.18.0
+urllib3==2.2.2
+pytest==8.2.2
 isort==5.13.2
 black==24.4.2
-flake8==7.0.0
-mypy==1.10.0
+flake8==7.1.0
+mypy==1.10.1

lib/spack/external/__init__.py (vendored):

@@ -18,7 +18,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.5-dev (commit cbb1fd5eb397a70d466e5160b393b87b0dbcc78f)
+* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

 astunparse
 ----------------

lib/spack/external/archspec/cpu/detect.py (vendored):

@@ -47,11 +47,7 @@ def decorator(factory):

 def partial_uarch(
-    name: str = "",
-    vendor: str = "",
-    features: Optional[Set[str]] = None,
-    generation: int = 0,
-    cpu_part: str = "",
+    name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
 ) -> Microarchitecture:
     """Construct a partial microarchitecture, from information gathered during system scan."""
     return Microarchitecture(
@@ -61,7 +57,6 @@
         features=features or set(),
         compilers={},
         generation=generation,
-        cpu_part=cpu_part,
     )

@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
         return partial_uarch(
             vendor=_canonicalize_aarch64_vendor(data),
             features=_feature_set(data, key="Features"),
-            cpu_part=data.get("CPU part", ""),
         )

     if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
     generic_candidates = [c for c in candidates if c.vendor == "generic"]
     best_generic = max(generic_candidates, key=sorting_fn)

-    # Relevant for AArch64. Filter on "cpu_part" if we have any match
-    if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
-        candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
-
     # Filter the candidates to be descendant of the best generic candidate.
     # This is to avoid that the lack of a niche feature that can be disabled
     # from e.g. BIOS prevents detection of a reasonably performant architecture
lib/spack/external/archspec/cpu/microarchitecture.py (vendored):

@@ -2,7 +2,9 @@
 # Archspec Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Types and functions to manage information on CPU microarchitectures."""
+"""Types and functions to manage information
+on CPU microarchitectures.
+"""
 import functools
 import platform
 import re
@@ -63,24 +65,21 @@ class Microarchitecture:
             passed in as argument above.
           * versions: versions that support this micro-architecture.

-        generation (int): generation of the micro-architecture, if relevant.
-        cpu_part (str): cpu part of the architecture, if relevant.
+        generation (int): generation of the micro-architecture, if
+            relevant.
     """

-    # pylint: disable=too-many-arguments,too-many-instance-attributes
+    # pylint: disable=too-many-arguments
     #: Aliases for micro-architecture's features
     feature_aliases = FEATURE_ALIASES

-    def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
+    def __init__(self, name, parents, vendor, features, compilers, generation=0):
         self.name = name
         self.parents = parents
         self.vendor = vendor
         self.features = features
         self.compilers = compilers
         # Only relevant for PowerPC
         self.generation = generation
-        # Only relevant for AArch64
-        self.cpu_part = cpu_part
         # Cache the ancestor computation
         self._ancestors = None
@@ -112,7 +111,6 @@ def __eq__(self, other):
             and self.parents == other.parents  # avoid ancestors here
             and self.compilers == other.compilers
             and self.generation == other.generation
-            and self.cpu_part == other.cpu_part
         )

     @coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
         cls_name = self.__class__.__name__
         fmt = (
             cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
-            "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
-            "cpu_part={0.cpu_part!r})"
+            "{0.features!r}, {0.compilers!r}, {0.generation!r})"
         )
         return fmt.format(self)
@@ -193,7 +190,6 @@ def to_dict(self):
             "generation": self.generation,
             "parents": [str(x) for x in self.parents],
             "compilers": self.compilers,
-            "cpupart": self.cpu_part,
         }

     @staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
             features=set(data["features"]),
             compilers=data.get("compilers", {}),
             generation=data.get("generation", 0),
-            cpu_part=data.get("cpupart", ""),
         )

     def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
     features = set(values["features"])
     compilers = values.get("compilers", {})
     generation = values.get("generation", 0)
-    cpu_part = values.get("cpupart", "")

-    targets[name] = Microarchitecture(
-        name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
-    )
+    targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)

 known_targets = {}
 data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
lib/spack/external/archspec/json/cpu/microarchitectures.json (vendored):

@@ -2225,14 +2225,10 @@
       ],
       "nvhpc": [
         {
-          "versions": "21.11:23.8",
+          "versions": "21.11:",
           "name": "zen3",
           "flags": "-tp {name}",
-          "warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
-        },
-        {
-          "versions": "23.9:",
-          "flags": "-tp {name}"
+          "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
         }
       ]
     }
@@ -2715,8 +2711,7 @@
           "flags": "-mcpu=thunderx2t99"
         }
       ]
-    },
-    "cpupart": "0x0af"
+    }
   },
   "a64fx": {
     "from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
           "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
         }
       ]
-    },
-    "cpupart": "0x001"
+    }
   },
   "cortex_a72": {
     "from": ["aarch64"],
@@ -2822,8 +2816,7 @@
           "flags" : "-mcpu=cortex-a72"
         }
       ]
-    },
-    "cpupart": "0xd08"
+    }
   },
   "neoverse_n1": {
     "from": ["cortex_a72", "armv8.2a"],
@@ -2844,7 +2837,8 @@
       "asimdrdm",
       "lrcpc",
       "dcpop",
-      "asimddp"
+      "asimddp",
+      "ssbs"
     ],
     "compilers" : {
       "gcc": [
@@ -2908,8 +2902,7 @@
           "flags": "-tp {name}"
         }
       ]
-    },
-    "cpupart": "0xd0c"
+    }
   },
   "neoverse_v1": {
     "from": ["neoverse_n1", "armv8.4a"],
@@ -2933,6 +2926,8 @@
       "lrcpc",
       "dcpop",
       "sha3",
+      "sm3",
+      "sm4",
       "asimddp",
       "sha512",
       "sve",
@@ -2941,6 +2936,7 @@
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3008,7 +3004,7 @@
       },
       {
         "versions": "11:",
-        "flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
+        "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
       },
       {
         "versions": "12:",
@@ -3032,8 +3028,7 @@
           "flags": "-tp {name}"
         }
       ]
-    },
-    "cpupart": "0xd40"
+    }
   },
   "neoverse_v2": {
     "from": ["neoverse_n1", "armv9.0a"],
@@ -3057,22 +3052,32 @@
       "lrcpc",
       "dcpop",
       "sha3",
+      "sm3",
+      "sm4",
       "asimddp",
       "sha512",
       "sve",
       "asimdfhm",
       "dit",
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
+      "sb",
       "dcpodp",
       "sve2",
+      "sveaes",
+      "svepmull",
+      "svebitperm",
+      "svesha3",
+      "svesm4",
       "flagm2",
       "frint",
       "svei8mm",
       "svebf16",
       "i8mm",
-      "bf16"
+      "bf16",
+      "dgh"
     ],
     "compilers" : {
       "gcc": [
@@ -3097,19 +3102,15 @@
         "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
       },
       {
-        "versions": "10.0:11.3.99",
+        "versions": "10.0:11.99",
         "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
       },
       {
-        "versions": "11.4:11.99",
-        "flags" : "-mcpu=neoverse-v2"
-      },
-      {
-        "versions": "12.0:12.2.99",
+        "versions": "12.0:12.99",
         "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
       },
       {
-        "versions": "12.3:",
+        "versions": "13.0:",
         "flags" : "-mcpu=neoverse-v2"
       }
     ],
@@ -3144,112 +3145,7 @@
         "flags": "-tp {name}"
       }
     ]
-    },
-    "cpupart": "0xd4f"
-  },
-  "neoverse_n2": {
-    "from": ["neoverse_n1", "armv9.0a"],
-    "vendor": "ARM",
-    "features": [
-      "fp",
-      "asimd",
-      "evtstrm",
-      "aes",
-      "pmull",
-      "sha1",
-      "sha2",
-      "crc32",
-      "atomics",
-      "fphp",
-      "asimdhp",
-      "cpuid",
-      "asimdrdm",
-      "jscvt",
-      "fcma",
-      "lrcpc",
-      "dcpop",
-      "sha3",
-      "asimddp",
-      "sha512",
-      "sve",
-      "asimdfhm",
-      "uscat",
-      "ilrcpc",
-      "flagm",
-      "sb",
-      "dcpodp",
-      "sve2",
-      "flagm2",
-      "frint",
-      "svei8mm",
-      "svebf16",
-      "i8mm",
-      "bf16"
-    ],
-    "compilers" : {
-      "gcc": [
-        {
-          "versions": "4.8:5.99",
-          "flags": "-march=armv8-a"
-        },
-        {
-          "versions": "6:6.99",
-          "flags" : "-march=armv8.1-a"
-        },
-        {
-          "versions": "7.0:7.99",
-          "flags" : "-march=armv8.2-a -mtune=cortex-a72"
-        },
-        {
-          "versions": "8.0:8.99",
-          "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
-        },
-        {
-          "versions": "9.0:9.99",
-          "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
-        },
-        {
-          "versions": "10.0:10.99",
-          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
-        },
-        {
-          "versions": "11.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "clang" : [
-        {
-          "versions": "9.0:10.99",
-          "flags" : "-march=armv8.5-a+sve"
-        },
-        {
-          "versions": "11.0:13.99",
-          "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
-        },
-        {
-          "versions": "14.0:15.99",
-          "flags" : "-march=armv9-a+i8mm+bf16"
-        },
-        {
-          "versions": "16.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "arm" : [
-        {
-          "versions": "23.04.0:",
-          "flags" : "-mcpu=neoverse-n2"
-        }
-      ],
-      "nvhpc" : [
-        {
-          "versions": "23.3:",
-          "name": "neoverse-n1",
-          "flags": "-tp {name}"
-        }
-      ]
-    },
-    "cpupart": "0xd49"
-  }
+    }
   },
   "m1": {
     "from": ["armv8.4a"],
@@ -3315,8 +3211,7 @@
         "flags" : "-mcpu=apple-m1"
       }
     ]
-    },
-    "cpupart": "0x022"
+    }
   },
   "m2": {
     "from": ["m1", "armv8.5a"],
@@ -3394,8 +3289,7 @@
         "flags" : "-mcpu=apple-m2"
       }
     ]
-    },
-    "cpupart": "0x032"
+    }
   },
   "arm": {
     "from": [],

lib/spack/external/archspec/json/cpu/microarchitectures_schema.json (vendored):

@@ -52,9 +52,6 @@
           }
         }
       },
-      "cpupart": {
-        "type": "string"
-      }
     },
     "required": [
@@ -110,4 +107,4 @@
   "additionalProperties": false
 }
 }
 }
 }
lib/spack/llnl/util/filesystem.py:

@@ -766,7 +766,6 @@ def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
-    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -789,8 +788,6 @@
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only

@@ -798,8 +795,6 @@
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    if allow_broken_symlinks and sys.platform == "win32":
-        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -872,16 +867,14 @@ def escaped_path(path):
             copy_mode(s, d)

     for target, d, s in links:
-        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        symlink(target, d)
         if _permissions:
             set_install_permissions(d)
             copy_mode(s, d)


 @system_path_filter
-def install_tree(
-    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
-):
+def install_tree(src, dest, symlinks=True, ignore=None):
     """Recursively install an entire directory tree rooted at *src*.

     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -892,21 +885,12 @@
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception.

     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(
-        src,
-        dest,
-        symlinks=symlinks,
-        allow_broken_symlinks=allow_broken_symlinks,
-        ignore=ignore,
-        _permissions=True,
-    )
+    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)


 @system_path_filter
@@ -8,6 +8,7 @@
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Union
|
||||
|
||||
from llnl.util import lang, tty
|
||||
|
||||
@@ -16,92 +17,66 @@
 if sys.platform == "win32":
     from win32file import CreateHardLink

-is_windows = sys.platform == "win32"
-
-
-def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
-    """
-    Create a link.
-
-    On non-Windows and Windows with System Administrator
-    privleges this will be a normal symbolic link via
-    os.symlink.
-
-    On Windows without privledges the link will be a
-    junction for a directory and a hardlink for a file.
-    On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the
-    same or different volume (drive letter) or even to
-    a remote file or directory (using UNC in its path).
-    Need System Administrator privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive
-    letter) only. Every file (file's data) has at least 1
-    hard link (file's name). But when this method creates
-    a new hard link there will be 2. Deleting all hard
-    links effectively deletes the file. Don't need System
-    Administrator privileges.
-
-    Junction: A link to a directory on the same or different
-    volume (drive letter) but not to a remote directory. Don't
-    need System Administrator privileges.
-
-    Parameters:
-        source_path (str): The real file or directory that the link points to.
-            Must be absolute OR relative to the link.
-        link_path (str): The path where the link will exist.
-        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
-            doesn't exist. This will still raise an exception on Windows.
-    """
-    source_path = os.path.normpath(source_path)
+def _windows_symlink(
+    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
+):
+    """On Windows with System Administrator privileges this will be a normal symbolic link via
+    os.symlink. On Windows without privledges the link will be a junction for a directory and a
+    hardlink for a file. On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
+    even to a remote file or directory (using UNC in its path). Need System Administrator
+    privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
+    has at least 1 hard link (file's name). But when this method creates a new hard link there will
+    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
+    privileges.
+
+    Junction: A link to a directory on the same or different volume (drive letter) but not to a
+    remote directory. Don't need System Administrator privileges."""
+    source_path = os.path.normpath(src)
     win_source_path = source_path
-    link_path = os.path.normpath(link_path)
+    link_path = os.path.normpath(dst)

-    # Never allow broken links on Windows.
-    if sys.platform == "win32" and allow_broken_symlinks:
-        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
-
-    if not allow_broken_symlinks:
-        # Perform basic checks to make sure symlinking will succeed
-        if os.path.lexists(link_path):
-            raise AlreadyExistsError(
-                f"Link path ({link_path}) already exists. Cannot create link."
-            )
-
-        if not os.path.exists(source_path):
-            if os.path.isabs(source_path) and not allow_broken_symlinks:
-                # An absolute source path that does not exist will result in a broken link.
-                raise SymlinkError(
-                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
-                    f"link would be broken so not making link."
-                )
-            else:
-                # os.symlink can create a link when the given source path is relative to
-                # the link path. Emulate this behavior and check to see if the source exists
-                # relative to the link path ahead of link creation to prevent broken
-                # links from being made.
-                link_parent_dir = os.path.dirname(link_path)
-                relative_path = os.path.join(link_parent_dir, source_path)
-                if os.path.exists(relative_path):
-                    # In order to work on windows, the source path needs to be modified to be
-                    # relative because hardlink/junction dont resolve relative paths the same
-                    # way as os.symlink. This is ignored on other operating systems.
-                    win_source_path = relative_path
-                elif not allow_broken_symlinks:
-                    raise SymlinkError(
-                        f"The source path ({source_path}) is not relative to the link path "
-                        f"({link_path}). Resulting link would be broken so not making link."
-                    )
+    # Perform basic checks to make sure symlinking will succeed
+    if os.path.lexists(link_path):
+        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
+
+    if not os.path.exists(source_path):
+        if os.path.isabs(source_path):
+            # An absolute source path that does not exist will result in a broken link.
+            raise SymlinkError(
+                f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                f"link would be broken so not making link."
+            )
+        else:
+            # os.symlink can create a link when the given source path is relative to
+            # the link path. Emulate this behavior and check to see if the source exists
+            # relative to the link path ahead of link creation to prevent broken
+            # links from being made.
+            link_parent_dir = os.path.dirname(link_path)
+            relative_path = os.path.join(link_parent_dir, source_path)
+            if os.path.exists(relative_path):
+                # In order to work on windows, the source path needs to be modified to be
+                # relative because hardlink/junction dont resolve relative paths the same
+                # way as os.symlink. This is ignored on other operating systems.
+                win_source_path = relative_path
+            else:
+                raise SymlinkError(
+                    f"The source path ({source_path}) is not relative to the link path "
+                    f"({link_path}). Resulting link would be broken so not making link."
+                )

     # Create the symlink
-    if sys.platform == "win32" and not _windows_can_symlink():
+    if not _windows_can_symlink():
         _windows_create_link(win_source_path, link_path)
     else:
         os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


-def islink(path: str) -> bool:
+def _windows_islink(path: str) -> bool:
     """Override os.islink to give correct answer for spack logic.

     For Non-Windows: a link can be determined with the os.path.islink method.
@@ -269,7 +244,7 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)


-def readlink(path: str, *, dir_fd=None):
+def _windows_readlink(path: str, *, dir_fd=None):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
@@ -338,6 +313,16 @@ def resolve_link_target_relative_to_the_link(link):
     return os.path.join(link_dir, target)


+if sys.platform == "win32":
+    symlink = _windows_symlink
+    readlink = _windows_readlink
+    islink = _windows_islink
+else:
+    symlink = os.symlink
+    readlink = os.readlink
+    islink = os.path.islink
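Editor's note: the block below is a usage sketch added for this writeup, not part of the diff. It shows what the module-level dispatch above buys callers: one `symlink` name that is `os.symlink` on POSIX and the junction/hardlink-aware wrapper on Windows. It assumes a Spack checkout on `sys.path`; the paths are illustrative.

    # Sketch: platform-agnostic call site for the dispatched names above.
    import os
    import tempfile

    from llnl.util.symlink import islink, readlink, symlink

    def demo_link():
        target = tempfile.mkdtemp()  # a real directory to point at
        link = os.path.join(tempfile.gettempdir(), "demo-link")  # illustrative path
        symlink(target, link)  # os.symlink on POSIX; junction/hardlink fallback on Windows
        assert islink(link)
        return readlink(link)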

class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
@@ -33,8 +33,23 @@
     pass


+esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
+# Ansi Control Sequence Introducers (CSI) are a well-defined format
+# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
+# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
+csi_pre = f"{esc}{lbracket}"
+csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
+ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
+# General ansi escape sequences have well-defined prefixes,
+# but content and suffixes are less reliable.
+# Conservatively assume they end with either "<ESC>\" or "<BELL>",
+# with no intervening "<ESC>"/"<BELL>" keys or newlines
+esc_pre = f"{esc}[@-_]"
+esc_content = f"[^{esc}{bell}{newline}]"
+esc_post = f"(?:{esc}{bslash}|{bell})"
+ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
 # Use this to strip escape sequences
-_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
+_escape = re.compile(f"{ansi_csi}|{ansi_esc}")

 # control characters for enabling/disabling echo
 #
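Editor's note: a self-contained sketch (added here, not in the diff) that rebuilds the composed pattern above and shows what it strips; the sample string is made up.

    import re

    esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
    ansi_csi = f"{esc}{lbracket}[0-?]*[ -/]*[@-~]"  # CSI sequences, e.g. "\x1b[01;31m"
    ansi_esc = f"{esc}[@-_][^{esc}{bell}{newline}]*(?:{esc}{bslash}|{bell})"  # e.g. OSC titles
    _escape = re.compile(f"{ansi_csi}|{ansi_esc}")

    text = "\x1b[01;31mbuild failed\x1b[0m \x1b]0;a window title\x07done"
    print(_escape.sub("", text))  # -> "build failed done"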
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.2"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+    github_pull_commits_re = (
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue

-            if re.match(github_patch_url_re, patch.url):
+            if re.match(github_pull_commits_re, patch.url):
+                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                errors.append(
+                    error_cls(
+                        f"patch URL in package {pkg_cls.name} "
+                        + "must not be a pull request commit; "
+                        + f"instead use {url}",
+                        [patch.url],
+                    )
+                )
+            elif re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with {1}".format(
-                                pkg_cls.name, full_index_arg
-                            ),
+                            f"patch URL in package {pkg_cls.name} "
+                            + f"must end with {full_index_arg}",
                             [patch.url],
                         )
                     )
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with .diff".format(
-                                pkg_cls.name
-                            ),
+                            f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                    )
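Editor's note: an illustrative run (not from the diff) of the two rewrites the audit applies; the repository name and hash are hypothetical.

    import re

    url = "https://github.com/org/repo/pull/123/commits/abc123.patch"  # hypothetical
    url = re.sub(r"/pull/\d+/commits/", r"/commit/", url)  # point at the commit itself
    url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)  # pin stable hashes
    print(url)  # -> https://github.com/org/repo/commit/abc123.patch?full_index=1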
@@ -23,6 +23,7 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
+from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -898,8 +899,9 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
-            tty.error(f"Error reading specfile: {url}: {e}")
+        except (URLError, web_util.SpackWebError) as url_err:
+            tty.error("Error reading specfile: {0}".format(url))
+            tty.error(url_err)
         return contents

     try:
@@ -2039,17 +2041,21 @@ def try_direct_fetch(spec, mirrors=None):
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
             specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
-                    e1,
+                    "Did not find {0} on {1}".format(
+                        specfile_name, buildcache_fetch_url_signed_json
+                    ),
+                    url_err,
                     level=2,
                 )
                 tty.debug(
-                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
+                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                    url_err_x,
+                    level=2,
                 )
                 continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2134,9 +2140,6 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     for mirror in mirror_collection.values():
         fetch_url = mirror.fetch_url
-        # TODO: oci:// does not support signing.
-        if fetch_url.startswith("oci://"):
-            continue
         keys_url = url_util.join(
             fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
         )
@@ -2147,12 +2150,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         try:
             _, _, json_file = web_util.read_from_url(keys_index)
             json_index = sjson.load(codecs.getreader("utf-8")(json_file))
-        except web_util.SpackWebError as url_err:
+        except (URLError, web_util.SpackWebError) as url_err:
             if web_util.url_exists(keys_index):
+                err_msg = [
+                    "Unable to find public keys in {0},",
+                    " caught exception attempting to read from {1}.",
+                ]
+
                 tty.error(
-                    f"Unable to find public keys in {url_util.format(fetch_url)},"
-                    f" caught exception attempting to read from {url_util.format(keys_index)}."
+                    "".join(err_msg).format(
+                        url_util.format(fetch_url), url_util.format(keys_index)
+                    )
                 )
+
                 tty.debug(url_err)

             continue
@@ -2432,7 +2442,7 @@ def get_remote_hash(self):
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+        except urllib.error.URLError:
             return None

         # Validate the hash
@@ -2454,7 +2464,7 @@ def conditional_fetch(self) -> FetchIndexResult:

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

         try:
@@ -2495,7 +2505,10 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
         url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
-        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
+        headers = {
+            "User-Agent": web_util.SPACK_USER_AGENT,
+            "If-None-Match": '"{}"'.format(self.etag),
+        }

         try:
             response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2503,14 +2516,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             if e.getcode() == 304:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch index {url}", e) from e
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError("Could not fetch index {}".format(url), e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
         except ValueError as e:
-            raise FetchIndexError(f"Remote index {url} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url), e) from e

         headers = response.headers
         etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2541,19 +2554,21 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
+        except urllib.error.URLError as e:
+            raise FetchIndexError(
+                "Could not fetch manifest from {}".format(url_manifest), e
+            ) from e

         try:
             manifest = json.loads(response.read())
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

         # Get first blob hash, which should be the index.json
         try:
             index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
         except Exception as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

         # Fresh?
         if index_digest.digest == self.local_hash:
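Editor's note: a stdlib-only sketch of the If-None-Match flow used by the index fetchers above, added for illustration; the URL and agent string are placeholders.

    import urllib.error
    import urllib.request

    def fetch_if_changed(url: str, etag: str):
        """Return new index bytes, or None if the server answers 304 Not Modified."""
        headers = {"User-Agent": "demo-agent", "If-None-Match": '"{}"'.format(etag)}
        try:
            response = urllib.request.urlopen(urllib.request.Request(url, headers=headers))
        except urllib.error.HTTPError as e:
            if e.getcode() == 304:
                return None  # fresh: keep the locally cached index.json
            raise
        return response.read()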
@@ -213,15 +213,18 @@ def _root_spec(spec_str: str) -> str:

     Args:
         spec_str: spec to be bootstrapped. Must be without compiler and target.
     """
-    # Add a compiler requirement to the root spec.
+    # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

     if platform == "darwin":
         spec_str += " %apple-clang"
     elif platform == "windows":
         spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
+
+    spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"
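Editor's note: a sketch (not in the diff) of the string _root_spec builds on a Linux host; the abstract spec and printed value are examples, and archspec values vary per machine.

    import archspec.cpu

    spec_str = "gnupg@2.3:"  # example abstract spec to bootstrap
    platform = "linux"       # what spack.platforms.host() would report on this host
    spec_str += " %gcc"      # compiler requirement chosen per platform
    spec_str += f" platform={platform}"
    spec_str += f" target={archspec.cpu.host().family}"
    print(spec_str)  # e.g. "gnupg@2.3: %gcc platform=linux target=x86_64"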
@@ -92,7 +92,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, module, path_from_modules
+from spack.util.module_cmd import load_module, path_from_modules

 #
 # This can be set by the user to globally disable parallel builds.
@@ -191,14 +191,6 @@ def __call__(self, *args, **kwargs):
         return super().__call__(*args, **kwargs)


-def _on_cray():
-    host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("default_os")
-    on_cray = str(host_platform) == "cray"
-    using_cnl = re.match(r"cnl\d+", str(host_os))
-    return on_cray, using_cnl
-
-
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -242,17 +234,6 @@ def clean_environment():
         if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
             env.unset(varname)

-    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
-    # interference with Spack dependencies.
-    # CNL requires these variables to be set (or at least some of them,
-    # depending on the CNL version).
-    on_cray, using_cnl = _on_cray()
-    if on_cray and not using_cnl:
-        env.unset("CRAY_LD_LIBRARY_PATH")
-        for varname in os.environ.keys():
-            if "PKGCONF" in varname:
-                env.unset(varname)
-
     # Unset the following variables because they can affect installation of
     # Autotools and CMake packages.
     build_system_vars = [
@@ -382,11 +363,7 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)

     # Set the target parameters that the compiler will add
-    # Don't set on cray platform because the targeting module handles this
-    if spec.satisfies("platform=cray"):
-        isa_arg = ""
-    else:
-        isa_arg = spec.architecture.target.optimization_flags(compiler)
+    isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -480,12 +457,9 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
     env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

+    # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        # Enable ccache in the compiler wrapper
         env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
-    else:
-        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
-        env.set("CCACHE_DISABLE", "1")

     # Gather information about various types of dependencies
     link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -764,7 +738,9 @@ def get_rpaths(pkg):
     # Second module is our compiler mod name. We use that to get rpaths from
     # module show output.
     if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
+        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
+        if mod_rpath:
+            rpaths.append(mod_rpath)
     return list(dedupe(filter_system_paths(rpaths)))
@@ -834,14 +810,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

-    # kludge to handle cray mpich and libsci being automatically loaded by
-    # PrgEnv modules on cray platform. Module unload does no damage when
-    # unnecessary
-    on_cray, _ = _on_cray()
-    if on_cray and not dirty:
-        for mod in ["cray-mpich", "cray-libsci"]:
-            module("unload", mod)
-
     if target and target.module_name:
         load_module(target.module_name)
@@ -162,7 +162,9 @@ def initconfig_compiler_entries(self):
         ld_flags = " ".join(flags["ldflags"])
         ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
-        # CMake has separate linker arguments for types of builds.
-        for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
+        # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
+        # is used by the archiver, so don't include "STATIC" in this loop:
+        for ld_type in ["EXE", "MODULE", "SHARED"]:
             ld_string = ld_format_string.format(ld_type)
             entries.append(cmake_cache_string(ld_string, ld_flags))
@@ -110,9 +110,8 @@ def cuda_flags(arch_list):
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to
-    # platform=linux, since they should also apply to platform=cray, and may
-    # apply to platform=darwin. We currently do not provide conflicts for
-    # platform=darwin with %apple-clang.
+    # platform=linux, since they may apply to platform=darwin. We currently
+    # do not provide conflicts for platform=darwin with %apple-clang.

     # Linux x86_64 compiler conflicts from here:
     # https://gist.github.com/ax3l/9489132
@@ -137,11 +136,14 @@ def cuda_flags(arch_list):
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
+    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
     conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
+    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -846,6 +846,7 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
+            or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root
@@ -936,32 +937,15 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
             "I_MPI_ROOT": self.normalize_path("mpi"),
         }

-        # CAUTION - SIMILAR code in:
-        #   var/spack/repos/builtin/packages/mpich/package.py
-        #   var/spack/repos/builtin/packages/openmpi/package.py
-        #   var/spack/repos/builtin/packages/mvapich2/package.py
-        #
-        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
-        if "platform=cray" in self.spec:
-            # TODO: Confirm
-            wrapper_vars.update(
-                {
-                    "MPICC": compilers_of_client["CC"],
-                    "MPICXX": compilers_of_client["CXX"],
-                    "MPIF77": compilers_of_client["F77"],
-                    "MPIF90": compilers_of_client["F90"],
-                }
-            )
-        else:
-            compiler_wrapper_commands = self.mpi_compiler_wrappers
-            wrapper_vars.update(
-                {
-                    "MPICC": compiler_wrapper_commands["MPICC"],
-                    "MPICXX": compiler_wrapper_commands["MPICXX"],
-                    "MPIF77": compiler_wrapper_commands["MPIF77"],
-                    "MPIF90": compiler_wrapper_commands["MPIF90"],
-                }
-            )
+        compiler_wrapper_commands = self.mpi_compiler_wrappers
+        wrapper_vars.update(
+            {
+                "MPICC": compiler_wrapper_commands["MPICC"],
+                "MPICXX": compiler_wrapper_commands["MPICXX"],
+                "MPIF77": compiler_wrapper_commands["MPIF77"],
+                "MPIF90": compiler_wrapper_commands["MPIF90"],
+            }
+        )

         # Ensure that the directory containing the compiler wrappers is in the
         # PATH. Spack packages add `prefix.bin` to their dependents' paths,
@@ -24,7 +24,6 @@ class MSBuildPackage(spack.package_base.PackageBase):
     build_system("msbuild")
     conflicts("platform=linux", when="build_system=msbuild")
     conflicts("platform=darwin", when="build_system=msbuild")
-    conflicts("platform=cray", when="build_system=msbuild")


 @spack.builder.builder("msbuild")
@@ -24,7 +24,6 @@ class NMakePackage(spack.package_base.PackageBase):
     build_system("nmake")
     conflicts("platform=linux", when="build_system=nmake")
     conflicts("platform=darwin", when="build_system=nmake")
-    conflicts("platform=cray", when="build_system=nmake")


 @spack.builder.builder("nmake")
@@ -36,9 +36,8 @@ class IntelOneApiPackage(Package):
         "target=ppc64:",
         "target=ppc64le:",
         "target=aarch64:",
-        "platform=darwin:",
-        "platform=cray:",
-        "platform=windows:",
+        "platform=darwin",
+        "platform=windows",
     ]:
         conflicts(c, msg="This package in only available for x86_64 and Linux")
@@ -22,6 +22,8 @@
 from urllib.parse import urlencode
 from urllib.request import HTTPHandler, Request, build_opener

+import ruamel.yaml
+
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
@@ -551,10 +553,9 @@ def generate_gitlab_ci_yaml(
     env,
     print_summary,
     output_file,
+    *,
     prune_dag=False,
     check_index_only=False,
-    run_optimizer=False,
-    use_dependencies=False,
     artifacts_root=None,
     remote_mirror_override=None,
 ):
@@ -575,12 +576,6 @@ def generate_gitlab_ci_yaml(
         this mode results in faster yaml generation time). Otherwise, also
         check each spec directly by url (useful if there is no index or it
         might be out of date).
-    run_optimizer (bool): If True, post-process the generated yaml to try
-        try to reduce the size (attempts to collect repeated configuration
-        and replace with definitions).)
-    use_dependencies (bool): If true, use "dependencies" rather than "needs"
-        ("needs" allows DAG scheduling). Useful if gitlab instance cannot
-        be configured to handle more than a few "needs" per job.
     artifacts_root (str): Path where artifacts like logs, environment
         files (spack.yaml, spack.lock), etc should be written. GitLab
         requires this to be within the project directory.
@@ -1111,7 +1106,7 @@ def main_script_replacements(cmd):
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
+        except (SpackError, HTTPError, URLError) as err:
             tty.warn(f"Problem populating buildgroup: {err}")
     else:
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1271,17 +1266,6 @@ def main_script_replacements(cmd):
         with open(copy_specs_file, "w") as fd:
             fd.write(json.dumps(buildcache_copies))

-    # TODO(opadron): remove this or refactor
-    if run_optimizer:
-        import spack.ci_optimization as ci_opt
-
-        output_object = ci_opt.optimizer(output_object)
-
-    # TODO(opadron): remove this or refactor
-    if use_dependencies:
-        import spack.ci_needs_workaround as cinw
-
-        output_object = cinw.needs_to_dependencies(output_object)
     else:
         # No jobs were generated
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1310,8 +1294,11 @@ def main_script_replacements(cmd):
     if not rebuild_everything:
         sys.exit(1)

-    with open(output_file, "w") as outf:
-        outf.write(syaml.dump(sorted_output, default_flow_style=True))
+    # Minimize yaml output size through use of anchors
+    syaml.anchorify(sorted_output)
+
+    with open(output_file, "w") as f:
+        ruamel.yaml.YAML().dump(sorted_output, f)


 def _url_encode_string(input_string):
@@ -2095,7 +2082,7 @@ def read_broken_spec(broken_spec_url):
     """
     try:
         _, _, fs = web_util.read_from_url(broken_spec_url)
-    except web_util.SpackWebError:
+    except (URLError, web_util.SpackWebError, HTTPError):
         tty.warn(f"Unable to read broken spec from {broken_spec_url}")
         return None
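Editor's note: a small sketch (added here, not in the diff) of why dumping with anchors shrinks the generated file: ruamel.yaml emits a shared object once as an anchor and aliases later uses, which is what the anchorify step above sets up. The jobs dict is made up.

    import sys

    import ruamel.yaml

    common = {"tags": ["spack"], "script": ["spack ci rebuild"]}  # one shared object
    jobs = {"build-a": common, "build-b": common}
    ruamel.yaml.YAML().dump(jobs, sys.stdout)
    # build-a gets the full mapping as &id001; build-b is emitted as the alias *id001.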
@@ -1,34 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections.abc
-
-get_job_name = lambda needs_entry: (
-    needs_entry.get("job")
-    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
-    else needs_entry if isinstance(needs_entry, str) else None
-)
-
-
-def convert_job(job_entry):
-    if not isinstance(job_entry, collections.abc.Mapping):
-        return job_entry
-
-    needs = job_entry.get("needs")
-    if needs is None:
-        return job_entry
-
-    new_job = {}
-    new_job.update(job_entry)
-    del new_job["needs"]
-
-    new_job["dependencies"] = list(
-        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
-    )
-
-    return new_job
-
-
-def needs_to_dependencies(yaml):
-    return dict((k, convert_job(v)) for k, v in yaml.items())
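Editor's note: illustrative input/output (added here) for the deleted helper above; the job names are made up.

    pipeline = {
        "build-zlib": {
            "script": ["spack ci rebuild"],
            "needs": [{"job": "generate", "artifacts": True}, "setup"],
        }
    }
    print(needs_to_dependencies(pipeline))
    # {'build-zlib': {'script': ['spack ci rebuild'],
    #                 'dependencies': ['generate', 'setup']}}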
@@ -1,363 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections
-import collections.abc
-import copy
-import hashlib
-
-import spack.util.spack_yaml as syaml
-
-
-def sort_yaml_obj(obj):
-    if isinstance(obj, collections.abc.Mapping):
-        return syaml.syaml_dict(
-            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
-        )
-
-    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
-        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
-
-    return obj
-
-
-def matches(obj, proto):
-    """Returns True if the test object "obj" matches the prototype object
-    "proto".
-
-    If obj and proto are mappings, obj matches proto if (key in obj) and
-    (obj[key] matches proto[key]) for every key in proto.
-
-    If obj and proto are sequences, obj matches proto if they are of the same
-    length and (a matches b) for every (a,b) in zip(obj, proto).
-
-    Otherwise, obj matches proto if obj == proto.
-
-    Precondition: proto must not have any reference cycles
-    """
-    if isinstance(obj, collections.abc.Mapping):
-        if not isinstance(proto, collections.abc.Mapping):
-            return False
-
-        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
-
-    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
-        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
-            return False
-
-        if len(obj) != len(proto):
-            return False
-
-        return all(matches(obj[index], val) for index, val in enumerate(proto))
-
-    return obj == proto
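Editor's note: a quick illustration (added here) of the asymmetric matching rules documented above; the data is made up.

    obj = {"tags": ["spack"], "script": ["make"], "stage": "build"}
    proto = {"tags": ["spack"], "script": ["make"]}
    print(matches(obj, proto))  # True: every key of proto matches inside obj
    print(matches(proto, obj))  # False: obj-as-prototype also requires "stage"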
-def subkeys(obj, proto):
-    """Returns the test mapping "obj" after factoring out the items it has in
-    common with the prototype mapping "proto".
-
-    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
-    returns a mapping, m, whose keys are the union of the keys of a and b, and
-    for every such key, "k", its corresponding value is:
-
-    - merge(a[key], b[key])  if a[key] and b[key] are mappings, or
-    - b[key]                 if (key in b) and not matches(a[key], b[key]),
-                             or
-    - a[key]                 otherwise
-
-
-    If obj and proto are mappings, the returned object is the smallest object,
-    "a", such that merge(a, proto) matches obj.
-
-    Otherwise, obj is returned.
-    """
-    if not (
-        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
-    ):
-        return obj
-
-    new_obj = {}
-    for key, value in obj.items():
-        if key not in proto:
-            new_obj[key] = value
-            continue
-
-        if matches(value, proto[key]) and matches(proto[key], value):
-            continue
-
-        if isinstance(value, collections.abc.Mapping):
-            new_obj[key] = subkeys(value, proto[key])
-            continue
-
-        new_obj[key] = value
-
-    return new_obj
-
-
-def add_extends(yaml, key):
-    """Modifies the given object "yaml" so that it includes an "extends" key
-    whose value features "key".
-
-    If "extends" is not in yaml, then yaml is modified such that
-    yaml["extends"] == key.
-
-    If yaml["extends"] is a str, then yaml is modified such that
-    yaml["extends"] == [yaml["extends"], key]
-
-    If yaml["extends"] is a list that does not include key, then key is
-    appended to the list.
-
-    Otherwise, yaml is left unchanged.
-    """
-
-    has_key = "extends" in yaml
-    extends = yaml.get("extends")
-
-    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
-        return
-
-    if extends is None:
-        yaml["extends"] = key
-        return
-
-    if isinstance(extends, str):
-        if extends != key:
-            yaml["extends"] = [extends, key]
-        return
-
-    if key not in extends:
-        extends.append(key)
-
-
-def common_subobject(yaml, sub):
-    """Factor prototype object "sub" out of the values of mapping "yaml".
-
-    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
-
-    - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
-    - Otherwise, new[key] = yaml[key].
-
-    If the above match criteria is not satisfied for any such key, then (yaml,
-    None) is returned. The yaml object is returned unchanged.
-
-    Otherwise, each matching value in new is modified as in
-    add_extends(new[key], common_key), and then new[common_key] is set to sub.
-    The common_key value is chosen such that it does not match any preexisting
-    key in new. In this case, (new, common_key) is returned.
-    """
-    match_list = set(k for k, v in yaml.items() if matches(v, sub))
-
-    if not match_list:
-        return yaml, None
-
-    common_prefix = ".c"
-    common_index = 0
-
-    while True:
-        common_key = "".join((common_prefix, str(common_index)))
-        if common_key not in yaml:
-            break
-        common_index += 1
-
-    new_yaml = {}
-
-    for key, val in yaml.items():
-        new_yaml[key] = copy.deepcopy(val)
-
-        if not matches(val, sub):
-            continue
-
-        new_yaml[key] = subkeys(new_yaml[key], sub)
-        add_extends(new_yaml[key], common_key)
-
-    new_yaml[common_key] = sub
-
-    return new_yaml, common_key
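Editor's note: a toy run (added here) of common_subobject factoring a shared sub-object into a hidden ".c0" entry that the originals extend; the jobs are made up.

    yaml = {
        "job-1": {"tags": ["spack"], "script": ["make"]},
        "job-2": {"tags": ["spack"], "script": ["make install"]},
    }
    new_yaml, key = common_subobject(yaml, {"tags": ["spack"]})
    print(key)                # .c0
    print(new_yaml[key])      # {'tags': ['spack']}
    print(new_yaml["job-1"])  # {'script': ['make'], 'extends': '.c0'}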
-def print_delta(name, old, new, applied=None):
-    delta = new - old
-    reldelta = (1000 * delta) // old
-    reldelta = (reldelta // 10, reldelta % 10)
-
-    if applied is None:
-        applied = new <= old
-
-    print(
-        "\n".join(
-            (
-                "{0} {1}:",
-                "  before: {2: 10d}",
-                "  after : {3: 10d}",
-                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
-            )
-        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
-    )
-
-
-def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
-    """Try applying an optimization pass and return information about the
-    result
-
-    "name" is a string describing the nature of the pass. If it is a non-empty
-    string, summary statistics are also printed to stdout.
-
-    "yaml" is the object to apply the pass to.
-
-    "optimization_pass" is the function implementing the pass to be applied.
-
-    "args" and "kwargs" are the additional arguments to pass to optimization
-    pass. The pass is applied as
-
-    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
-
-    The pass's results are greedily rejected if it does not modify the original
-    yaml document, or if it produces a yaml document that serializes to a
-    larger string.
-
-    Returns (new_yaml, yaml, applied, other_results) if applied, or
-    (yaml, new_yaml, applied, other_results) otherwise.
-    """
-    result = optimization_pass(yaml, *args, **kwargs)
-    new_yaml, other_results = result[0], result[1:]
-
-    if new_yaml is yaml:
-        # pass was not applied
-        return (yaml, new_yaml, False, other_results)
-
-    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
-
-    # pass makes the size worse: not applying
-    applied = post_size <= pre_size
-    if applied:
-        yaml, new_yaml = new_yaml, yaml
-
-    if name:
-        print_delta(name, pre_size, post_size, applied)
-
-    return (yaml, new_yaml, applied, other_results)
-
-
-def build_histogram(iterator, key):
-    """Builds a histogram of values given an iterable of mappings and a key.
-
-    For each mapping "m" with key "key" in iterator, the value m[key] is
-    considered.
-
-    Returns a list of tuples (hash, count, proportion, value), where
-
-    - "hash" is a sha1sum hash of the value.
-    - "count" is the number of occurences of values that hash to "hash".
-    - "proportion" is the proportion of all values considered above that
-      hash to "hash".
-    - "value" is one of the values considered above that hash to "hash".
-      Which value is chosen when multiple values hash to the same "hash" is
-      undefined.
-
-    The list is sorted in descending order by count, yielding the most
-    frequently occuring hashes first.
-    """
-    buckets = collections.defaultdict(int)
-    values = {}
-
-    num_objects = 0
-    for obj in iterator:
-        num_objects += 1
-
-        try:
-            val = obj[key]
-        except (KeyError, TypeError):
-            continue
-
-        value_hash = hashlib.sha1()
-        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
-        value_hash = value_hash.hexdigest()
-
-        buckets[value_hash] += 1
-        values[value_hash] = val
-
-    return [
-        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
-        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
-    ]
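Editor's note: toy data (added here) for build_histogram; two of three jobs share a tag list, so that bucket sorts first with count 2 and proportion of roughly 0.67. Assumes Spack's syaml module is importable, since the helper hashes values through it.

    jobs = [
        {"tags": ["spack", "large"]},
        {"tags": ["spack", "large"]},
        {"tags": ["small"]},
    ]
    for value_hash, count, proportion, value in build_histogram(jobs, "tags"):
        print(value_hash[:8], count, round(proportion, 2), value)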
-def optimizer(yaml):
-    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-
-    # try factoring out commonly repeated portions
-    common_job = {
-        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
-        "after_script": ['rm -rf "./spack"'],
-        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
-    }
-
-    # look for a list of tags that appear frequently
-    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
-
-    # If a list of tags is found, and there are more than one job that uses it,
-    # *and* the jobs that do use it represent at least 70% of all jobs, then
-    # add the list to the prototype object.
-    if tags and count > 1 and proportion >= 0.70:
-        common_job["tags"] = tags
-
-    # apply common object factorization
-    yaml, other, applied, rest = try_optimization_pass(
-        "general common object factorization", yaml, common_subobject, common_job
-    )
-
-    # look for a common script, and try factoring that out
-    _, count, proportion, script = next(
-        iter(build_histogram(yaml.values(), "script")), (None,) * 4
-    )
-
-    if script and count > 1 and proportion >= 0.70:
-        yaml, other, applied, rest = try_optimization_pass(
-            "script factorization", yaml, common_subobject, {"script": script}
-        )
-
-    # look for a common before_script, and try factoring that out
-    _, count, proportion, script = next(
-        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
-    )
-
-    if script and count > 1 and proportion >= 0.70:
-        yaml, other, applied, rest = try_optimization_pass(
-            "before_script factorization", yaml, common_subobject, {"before_script": script}
-        )
-
-    # Look specifically for the SPACK_ROOT_SPEC environment variables.
-    # Try to factor them out.
-    h = build_histogram(
-        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
-        "SPACK_ROOT_SPEC",
-    )
-
-    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
-    # environment variable; not just the one that appears with the greatest
-    # frequency. We only require that more than 1 job uses a given instance's
-    # value, because we expect the value to be very large, and so expect even
-    # few-to-one factorizations to yield large space savings.
-    counter = 0
-    for _, count, proportion, spec in h:
-        if count <= 1:
-            continue
-
-        counter += 1
-
-        yaml, other, applied, rest = try_optimization_pass(
-            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
-            yaml,
-            common_subobject,
-            {"variables": {"SPACK_ROOT_SPEC": spec}},
-        )
-
-    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
-
-    print("\n")
-    print_delta("overall summary", original_size, new_size)
-    print("\n")
-    return yaml
@@ -444,7 +444,7 @@ def format_list(specs):
 def filter_loaded_specs(specs):
     """Filter a list of specs returning only those that are
     currently loaded."""
-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
     return [x for x in specs if x.dag_hash() in hashes]
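Editor's note: a one-line motivation (added here) for the split(os.pathsep) change above: hash lists joined like PATH entries only split portably with os.pathsep.

    import os

    loaded = os.pathsep.join(["abc123", "def456"])  # how the variable gets written
    print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on every platform
    print(loaded.split(":"))         # wrong on Windows, where os.pathsep == ";"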
@@ -813,7 +813,7 @@ def _push_oci(

     def extra_config(spec: Spec):
         spec_dict = spec.to_dict(hash=ht.dag_hash)
-        spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+        spec_dict["buildcache_layout_version"] = 1
         spec_dict["binary_cache_checksum"] = {
             "hash_algorithm": "sha256",
             "hash": checksums[spec.dag_hash()].compressed_digest.digest,
@@ -6,6 +6,7 @@
 import json
 import os
 import shutil
+import warnings
 from urllib.parse import urlparse, urlunparse

 import llnl.util.filesystem as fs
@@ -73,7 +74,7 @@ def setup_parser(subparser):
         "--optimize",
         action="store_true",
         default=False,
-        help="(experimental) optimize the gitlab yaml file for size\n\n"
+        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
         "run the generated document through a series of optimization passes "
         "designed to reduce the size of the generated file",
     )
@@ -81,7 +82,7 @@ def setup_parser(subparser):
         "--dependencies",
         action="store_true",
         default=False,
-        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
+        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
     )
     generate.add_argument(
         "--buildcache-destination",
@@ -200,6 +201,18 @@ def ci_generate(args):
     before invoking this command. the value must be the CDash authorization token needed to create
     a build group and register all generated jobs under it
     """
+    if args.optimize:
+        warnings.warn(
+            "The --optimize option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
+    if args.dependencies:
+        warnings.warn(
+            "The --dependencies option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
     env = spack.cmd.require_active_env(cmd_name="ci generate")

     if args.copy_to:
@@ -212,8 +225,6 @@ def ci_generate(args):

     output_file = args.output_file
     copy_yaml_to = args.copy_to
-    run_optimizer = args.optimize
-    use_dependencies = args.dependencies
     prune_dag = args.prune_dag
     index_only = args.index_only
     artifacts_root = args.artifacts_root
@@ -234,8 +245,6 @@ def ci_generate(args):
         output_file,
         prune_dag=prune_dag,
         check_index_only=index_only,
-        run_optimizer=run_optimizer,
-        use_dependencies=use_dependencies,
         artifacts_root=artifacts_root,
         remote_mirror_override=buildcache_destination,
     )
@@ -106,7 +106,8 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
@@ -11,6 +11,7 @@
 from argparse import ArgumentParser, Namespace
 from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

+import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
 from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
             prepend_header(args, f)
             formatter(args, f)

+            if args.update_completion:
+                fs.set_executable(args.update)
+
     else:
         prepend_header(args, sys.stdout)
         formatter(args, sys.stdout)
@@ -3,6 +3,9 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import llnl.util.tty as tty
+from llnl.string import plural
+
 import spack.cmd
 import spack.cmd.common.arguments
 import spack.environment as ev
@@ -43,5 +46,9 @@ def concretize(parser, args):
     with env.write_transaction():
         concretized_specs = env.concretize(force=args.force, tests=tests)
         if not args.quiet:
-            ev.display_specs(concretized_specs)
+            if concretized_specs:
+                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
+                ev.display_specs([concrete for _, concrete in concretized_specs])
+            else:
+                tty.msg("No new specs to concretize.")
         env.write()
@@ -9,6 +9,8 @@

 import spack.cmd
 import spack.config
+import spack.fetch_strategy
 import spack.repo
 import spack.spec
+import spack.util.path
 import spack.version
@@ -69,13 +71,15 @@ def _retrieve_develop_source(spec, abspath):
     # We construct a package class ourselves, rather than asking for
     # Spec.package, since Spec only allows this when it is concrete
     package = pkg_cls(spec)
-    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
-        package.stage[0].fetcher.get_full_repo = True
+    source_stage = package.stage[0]
+    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
+        source_stage.fetcher.get_full_repo = True
         # If we retrieved this version before and cached it, we may have
         # done so without cloning the full git repo; likewise, any
         # mirror might store an instance with truncated history.
-        package.stage[0].disable_mirrors()
+        source_stage.disable_mirrors()

+    source_stage.fetcher.set_package(package)
     package.stage.steal_source(abspath)
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import errno
 import glob
 import os

@@ -11,43 +12,13 @@
 import spack.cmd
 import spack.paths
 import spack.repo
-from spack.spec import Spec
-from spack.util.editor import editor
+import spack.util.editor

 description = "open package files in $EDITOR"
 section = "packaging"
 level = "short"


-def edit_package(name, repo_path, namespace):
-    """Opens the requested package file in your favorite $EDITOR.
-
-    Args:
-        name (str): The name of the package
-        repo_path (str): The path to the repository containing this package
-        namespace (str): A valid namespace registered with Spack
-    """
-    # Find the location of the package
-    if repo_path:
-        repo = spack.repo.Repo(repo_path)
-    elif namespace:
-        repo = spack.repo.PATH.get_repo(namespace)
-    else:
-        repo = spack.repo.PATH
-    path = repo.filename_for_package_name(name)
-
-    spec = Spec(name)
-    if os.path.exists(path):
-        if not os.path.isfile(path):
-            tty.die("Something is wrong. '{0}' is not a file!".format(path))
-        if not os.access(path, os.R_OK):
-            tty.die("Insufficient permissions on '%s'!" % path)
-    else:
-        raise spack.repo.UnknownPackageError(spec.name)
-
-    editor(path)
-
-
 def setup_parser(subparser):
     excl_args = subparser.add_mutually_exclusive_group()

@@ -98,41 +69,67 @@ def setup_parser(subparser):
     excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
     excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

-    subparser.add_argument("package", nargs="?", default=None, help="package name")
+    subparser.add_argument("package", nargs="*", default=None, help="package name")


+def locate_package(name: str, repo: spack.repo.Repo) -> str:
+    path = repo.filename_for_package_name(name)
+
+    try:
+        with open(path, "r"):
+            return path
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            raise spack.repo.UnknownPackageError(name) from e
+        tty.die(f"Cannot edit package: {e}")
+
+
+def locate_file(name: str, path: str) -> str:
+    # convert command names to python module name
+    if path == spack.paths.command_path:
+        name = spack.cmd.python_name(name)
+
+    file_path = os.path.join(path, name)
+
+    # Try to open direct match.
+    try:
+        with open(file_path, "r"):
+            return file_path
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            tty.die(f"Cannot edit file: {e}")
+        pass
+
+    # Otherwise try to find a file that starts with the name
+    candidates = glob.glob(file_path + "*")
+    exclude_list = [".pyc", "~"]  # exclude binaries and backups
+    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
+    if len(files) > 1:
+        tty.die(
+            f"Multiple files start with `{name}`:\n"
+            + "\n".join(f"        {os.path.basename(f)}" for f in files)
+        )
+    elif not files:
+        tty.die(f"No file for '{name}' was found in {path}")
+    return files[0]
+
+
 def edit(parser, args):
-    name = args.package
-
-    # By default, edit package files
-    path = spack.paths.packages_path
+    names = args.package

     # If `--command`, `--test`, or `--module` is chosen, edit those instead
     if args.path:
-        path = args.path
-        if name:
-            # convert command names to python module name
-            if path == spack.paths.command_path:
-                name = spack.cmd.python_name(name)
-
-            path = os.path.join(path, name)
-            if not os.path.exists(path):
-                files = glob.glob(path + "*")
-                exclude_list = [".pyc", "~"]  # exclude binaries and backups
-                files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
-                if len(files) > 1:
-                    m = "Multiple files exist with the name {0}.".format(name)
-                    m += " Please specify a suffix. Files are:\n\n"
-                    for f in files:
-                        m += "        " + os.path.basename(f) + "\n"
-                    tty.die(m)
-                if not files:
-                    tty.die("No file for '{0}' was found in {1}".format(name, path))
-                path = files[0]  # already confirmed only one entry in files
-
-        editor(path)
-    elif name:
-        edit_package(name, args.repo, args.namespace)
+        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
+        spack.util.editor.editor(*paths)
+    elif names:
+        if args.repo:
+            repo = spack.repo.Repo(args.repo)
+        elif args.namespace:
+            repo = spack.repo.PATH.get_repo(args.namespace)
+        else:
+            repo = spack.repo.PATH
+        paths = [locate_package(name, repo) for name in names]
+        spack.util.editor.editor(*paths)
     else:
         # By default open the directory where packages live
-        editor(path)
+        spack.util.editor.editor(spack.paths.packages_path)
@@ -468,30 +468,32 @@ def env_remove(args):
     This removes an environment managed by Spack. Directory environments
     and manifests embedded in repositories should be removed manually.
     """
-    remove_envs = []
+    read_envs = []
     valid_envs = []
-    invalid_envs = []
+    bad_envs = []

     for env_name in ev.all_environment_names():
         try:
             env = ev.read(env_name)
-            valid_envs.append(env)
+            valid_envs.append(env_name)

             if env_name in args.rm_env:
-                remove_envs.append(env)
+                read_envs.append(env)
         except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-            invalid_envs.append(env_name)
+            if env_name in args.rm_env:
+                bad_envs.append(env_name)

-    # Check if remove_env is included from another env before trying to remove
-    for env in valid_envs:
-        for remove_env in remove_envs:
-            # don't check if environment is included to itself
-            if env.name == remove_env.name:
-                continue
-
-            if remove_env.path in env.included_concrete_envs:
-                msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
+    # Check if env is linked to another before trying to remove
+    for name in valid_envs:
+        # don't check if environment is included to itself
+        if name == env_name:
+            continue
+
+        environ = ev.Environment(ev.root(name))
+        if ev.root(env_name) in environ.included_concrete_envs:
+            msg = f'Environment "{env_name}" is being used by environment "{name}"'
             if args.force:
                 tty.warn(msg)
             else:
@@ -504,7 +506,7 @@ def env_remove(args):
         if not answer:
             tty.die("Will not remove any environments")

-    for env in remove_envs:
+    for env in read_envs:
         name = env.name
         if env.active:
             tty.die(f"Environment {name} can't be removed while activated.")
@@ -50,7 +50,7 @@
     @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies

 architecture variants:
-    @m{platform=platform}        linux, darwin, cray, etc.
+    @m{platform=platform}        linux, darwin, freebsd, windows
     @m{os=operating_system}      specific <operating_system>
     @m{target=target}            specific <target> processor
     @m{arch=platform-os-target}  shortcut for all three above
@@ -10,6 +10,7 @@
 from typing import List

 import llnl.util.filesystem as fs
+from llnl.string import plural
 from llnl.util import lang, tty

 import spack.build_environment
@@ -61,7 +62,6 @@ def install_kwargs_from_args(args):
         "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
         "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
         "include_build_deps": args.include_build_deps,
-        "explicit": True,  # Use true as a default for install command
         "stop_at": args.until,
         "unsigned": args.unsigned,
         "install_deps": ("dependencies" in args.things_to_install),
@@ -376,7 +376,9 @@ def _maybe_add_and_concretize(args, env, specs):
     # `spack concretize`
     tests = compute_tests_install_kwargs(env.user_specs, args.test)
     concretized_specs = env.concretize(tests=tests)
-    ev.display_specs(concretized_specs)
+    if concretized_specs:
+        tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
+        ev.display_specs([concrete for _, concrete in concretized_specs])

     # save view regeneration for later, so that we only do it
     # once, as it can be slow.
@@ -473,6 +475,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
     require_user_confirmation_for_overwrite(concrete_specs, args)
     install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

-    installs = [(s.package, install_kwargs) for s in concrete_specs]
-    builder = PackageInstaller(installs)
+    installs = [s.package for s in concrete_specs]
+    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
+    builder = PackageInstaller(installs, install_kwargs)
     builder.install()
@@ -114,15 +114,16 @@ def _process_result(result, show, required_format, kwargs):

# dump the solutions as concretized specs
if "solutions" in show:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == "json":
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
if required_format:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == "json":
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
print()

if result.unsolved_specs and "solutions" in show:
@@ -105,11 +105,19 @@ def spec(parser, args):
if env:
env.concretize()
specs = env.concretized_specs()

# environments are printed together in a combined tree() invocation,
# except when using --yaml or --json, which we print spec by spec below.
if not args.format:
tree_kwargs["key"] = spack.traverse.by_dag_hash
tree_kwargs["hashes"] = args.long or args.very_long
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
return
else:
tty.die("spack spec requires at least one spec or an active environment")

for input, output in specs:
# With -y, just print YAML to output.
# With --yaml or --json, just print the raw specs to output
if args.format:
if args.format == "yaml":
# use write because to_yaml already has a newline.
@@ -151,7 +151,8 @@ def is_installed(spec):
key=lambda s: s.dag_hash(),
)

return [spec for spec in specs if is_installed(spec)]
with spack.store.STORE.db.read_transaction():
return [spec for spec in specs if is_installed(spec)]


def dependent_environments(

@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
print()
tty.info("The following environments still reference these specs:")
colify([e.name for e in other_dependent_envs.keys()], indent=4)
if env:
msgs.append("use `spack remove` to remove the spec from the current environment")
msgs.append("use `spack env remove` to remove environments")
msgs.append("use `spack uninstall --force` to override")
print()
@@ -71,7 +71,7 @@ def unload(parser, args):
"Cannot specify specs on command line when unloading all specs with '--all'"
)

hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
if args.specs:
specs = [
spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
@@ -695,10 +695,6 @@ def compiler_environment(self):
try:
# load modules and set env variables
for module in self.modules:
# On cray, mic-knl module cannot be loaded without cce module
# See: https://github.com/spack/spack/issues/3153
if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
spack.util.module_cmd.load_module("cce")
spack.util.module_cmd.load_module(module)

# apply other compiler environment changes
@@ -96,6 +96,8 @@ def verbose_flag(self):

openmp_flag = "-fopenmp"

# C++ flags based on CMake Modules/Compiler/Clang.cmake

@property
def cxx11_flag(self):
if self.real_version < Version("3.3"):

@@ -120,6 +122,24 @@ def cxx17_flag(self):

return "-std=c++17"

@property
def cxx20_flag(self):
if self.real_version < Version("5.0"):
raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
elif self.real_version < Version("11.0"):
return "-std=c++2a"
else:
return "-std=c++20"

@property
def cxx23_flag(self):
if self.real_version < Version("12.0"):
raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
elif self.real_version < Version("17.0"):
return "-std=c++2b"
else:
return "-std=c++23"

@property
def c99_flag(self):
return "-std=c99"

@@ -142,7 +162,10 @@ def c17_flag(self):
def c23_flag(self):
if self.real_version < Version("9.0"):
raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
return "-std=c2x"
elif self.real_version < Version("18.0"):
return "-std=c2x"
else:
return "-std=c23"

@property
def cc_pic_flag(self):
@@ -78,24 +78,17 @@
"image": "quay.io/almalinuxorg/almalinux:8"
}
},
"centos:stream": {
"centos:stream9": {
"bootstrap": {
"template": "container/centos_stream.dockerfile",
"image": "quay.io/centos/centos:stream"
"template": "container/centos_stream9.dockerfile",
"image": "quay.io/centos/centos:stream9"
},
"os_package_manager": "dnf_epel",
"build": "spack/centos-stream",
"build": "spack/centos-stream9",
"final": {
"image": "quay.io/centos/centos:stream"
"image": "quay.io/centos/centos:stream9"
}
},
"centos:7": {
"bootstrap": {
"template": "container/centos_7.dockerfile"
},
"os_package_manager": "yum",
"build": "spack/centos7"
},
"opensuse/leap:15": {
"bootstrap": {
"template": "container/leap-15.dockerfile"
@@ -97,7 +97,7 @@ class OpenMpi(Package):
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
SUPPORTED_LANGUAGES = ("fortran", "cxx")


def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
@@ -24,6 +24,7 @@
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import readlink, symlink

import spack.cmd
import spack.compilers
import spack.concretize
import spack.config

@@ -1190,6 +1191,7 @@ def scope_name(self):
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""

lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}

@@ -1200,26 +1202,37 @@ def include_concrete_envs(self):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")

env = Environment(env_path)
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}

with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)

# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")

# Copy unique root specs from env
for root_dict in env._concrete_roots_dict():
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])

# Copy unique concrete specs from env
for dag_hash, spec_details in env._concrete_specs_dict().items():
if dag_hash not in concrete_hash_seen:
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
{dag_hash: spec_details}
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
)
concrete_hash_seen.add(dag_hash)
concrete_hash_seen.add(concrete_spec)

# Copy transitive include data
transitive = env.included_concrete_spec_data
if transitive:
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]

self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
@@ -1936,13 +1949,19 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
specs = specs if specs is not None else roots

# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = (
install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
overwrite: Set[str] = set()
overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
install_args["overwrite"] = overwrite

explicit: Set[str] = set()
explicit.update(
install_args.get("explicit", []),
(s.dag_hash() for s in specs),
(s.dag_hash() for s in roots),
)
install_args["explicit"] = explicit

installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]

PackageInstaller(installs).install()
PackageInstaller([spec.package for spec in specs], install_args).install()

def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""
@@ -2132,23 +2151,16 @@ def _get_environment_specs(self, recurse_dependencies=True):

return specs

def _concrete_specs_dict(self):
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs

def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]

def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()

spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit()

@@ -2169,7 +2181,7 @@ def _to_lockfile_dict(self):
# spack version information
"spack": spack_dict,
# users specs + hashes are the 'roots' of the environment
"roots": root_specs,
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs,
}
@@ -2462,27 +2474,21 @@ def _equiv_dict(first, second):
return same_values and same_keys_with_same_overrides


def display_specs(concretized_specs):
"""Displays the list of specs returned by `Environment.concretize()`.
def display_specs(specs):
"""Displays a list of specs traversed breadth-first, covering nodes, with install status.

Args:
concretized_specs (list): list of specs returned by
`Environment.concretize()`
specs (list): list of specs
"""

def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
format=spack.spec.DISPLAY_FORMAT,
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
)

for user_spec, concrete_spec in concretized_specs:
tty.msg("Concretized {0}".format(user_spec))
sys.stdout.write(_tree_to_display(concrete_spec))
print("")
tree_string = spack.spec.tree(
specs,
format=spack.spec.DISPLAY_FORMAT,
hashes=True,
hashlen=7,
status_fn=spack.spec.Spec.install_status,
key=traverse.by_dag_hash,
)
print(tree_string)


def _concretize_from_constraints(spec_constraints, tests=False):
@@ -554,7 +554,7 @@ def fetch(self):

try:
response = self._urlopen(self.url)
except (TimeoutError, urllib.error.URLError) as e:
except urllib.error.URLError as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
@@ -13,7 +13,6 @@
import spack.config
import spack.relocate
from spack.util.elf import ElfParsingError, parse_elf
from spack.util.executable import Executable


def is_shared_library_elf(filepath):

@@ -141,7 +140,7 @@ def post_install(spec, explicit=None):
return

# Only enable on platforms using ELF.
if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
if not spec.satisfies("platform=linux"):
return

# Disable this hook when bootstrapping, to avoid recursion.

@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
return

# Should failing to locate patchelf be a hard error?
patchelf_path = spack.relocate._patchelf()
if not patchelf_path:
patchelf = spack.relocate._patchelf()
if not patchelf:
return
patchelf = Executable(patchelf_path)

fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)
@@ -117,7 +117,7 @@ def post_install(spec, explicit=None):
return

# Only enable on platforms using ELF.
if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
if not spec.satisfies("platform=linux"):
return

visit_directory_tree(spec.prefix, ElfFilesWithRPathVisitor())
@@ -600,9 +600,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
if node is spec:
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
elif source_pkg_dir:
fs.install_tree(
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(source_pkg_dir, dest_pkg_dir)


def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -761,12 +759,8 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
if not self.pkg.spec.concrete:
raise ValueError(f"{self.pkg.name} must have a concrete spec")

# Cache the package phase options with the explicit package,
# popping the options to ensure installation of associated
# dependencies is NOT affected by these options.

self.pkg.stop_before_phase = install_args.pop("stop_before", None) # type: ignore[attr-defined] # noqa: E501
self.pkg.last_phase = install_args.pop("stop_at", None) # type: ignore[attr-defined]
self.pkg.stop_before_phase = install_args.get("stop_before") # type: ignore[attr-defined] # noqa: E501
self.pkg.last_phase = install_args.get("stop_at") # type: ignore[attr-defined]

# Cache the package id for convenience
self.pkg_id = package_id(pkg.spec)
@@ -1076,19 +1070,17 @@ def flag_installed(self, installed: List[str]) -> None:

@property
def explicit(self) -> bool:
"""The package was explicitly requested by the user."""
return self.is_root and self.request.install_args.get("explicit", True)
return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])

@property
def is_root(self) -> bool:
"""The package was requested directly, but may or may not be explicit
in an environment."""
def is_build_request(self) -> bool:
"""The package was requested directly"""
return self.pkg == self.request.pkg

@property
def use_cache(self) -> bool:
_use_cache = True
if self.is_root:
if self.is_build_request:
return self.request.install_args.get("package_use_cache", _use_cache)
else:
return self.request.install_args.get("dependencies_use_cache", _use_cache)

@@ -1096,7 +1088,7 @@ def use_cache(self) -> bool:
@property
def cache_only(self) -> bool:
_cache_only = False
if self.is_root:
if self.is_build_request:
return self.request.install_args.get("package_cache_only", _cache_only)
else:
return self.request.install_args.get("dependencies_cache_only", _cache_only)

@@ -1122,24 +1114,17 @@ def priority(self):

class PackageInstaller:
"""
Class for managing the install process for a Spack instance based on a
bottom-up DAG approach.
Class for managing the install process for a Spack instance based on a bottom-up DAG approach.

This installer can coordinate concurrent batch and interactive, local
and distributed (on a shared file system) builds for the same Spack
instance.
This installer can coordinate concurrent batch and interactive, local and distributed (on a
shared file system) builds for the same Spack instance.
"""

def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
"""Initialize the installer.

Args:
installs (list): list of tuples, where each
tuple consists of a package (PackageBase) and its associated
install arguments (dict)
"""
def __init__(
self, packages: List["spack.package_base.PackageBase"], install_args: dict
) -> None:
# List of build requests
self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]

# Priority queue of build tasks
self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
@@ -1557,17 +1542,6 @@ def _add_tasks(self, request: BuildRequest, all_deps):
tty.warn(f"Installation request refused: {str(err)}")
return

# Skip out early if the spec is not being installed locally (i.e., if
# external or upstream).
#
# External and upstream packages need to get flagged as installed to
# ensure proper status tracking for environment build.
explicit = request.install_args.get("explicit", True)
not_local = _handle_external_and_upstream(request.pkg, explicit)
if not_local:
self._flag_installed(request.pkg)
return

install_compilers = spack.config.get("config:install_missing_compilers", False)

install_deps = request.install_args.get("install_deps")
@@ -1683,10 +1657,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
if not pkg.unit_test_check():
return

# Injecting information to know if this installation request is the root one
# to determine in BuildProcessInstaller whether installation is explicit or not
install_args["is_root"] = task.is_root

try:
self._setup_install_dir(pkg)
@@ -1998,8 +1968,8 @@ def install(self) -> None:

self._init_queue()
fail_fast_err = "Terminating after first install failure"
single_explicit_spec = len(self.build_requests) == 1
failed_explicits = []
single_requested_spec = len(self.build_requests) == 1
failed_build_requests = []

install_status = InstallStatus(len(self.build_pq))
@@ -2048,11 +2018,10 @@ def install(self) -> None:
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
# some package likely depends on it.
if not task.explicit:
if _handle_external_and_upstream(pkg, False):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue
if _handle_external_and_upstream(pkg, task.explicit):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue

# Flag a failed spec. Do not need an (install) prefix lock since
# assume using a separate (failed) prefix lock file.
@@ -2197,14 +2166,11 @@ def install(self) -> None:
if self.fail_fast:
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)

# Terminate at this point if the single explicit spec has
# failed to install.
if single_explicit_spec and task.explicit:
raise

# Track explicit spec id and error to summarize when done
if task.explicit:
failed_explicits.append((pkg, pkg_id, str(exc)))
# Terminate when a single build request has failed, or summarize errors later.
if task.is_build_request:
if single_requested_spec:
raise
failed_build_requests.append((pkg, pkg_id, str(exc)))

finally:
# Remove the install prefix if anything went wrong during
@@ -2227,16 +2193,16 @@ def install(self) -> None:
if request.install_args.get("install_package") and request.pkg_id not in self.installed
]

if failed_explicits or missing:
for _, pkg_id, err in failed_explicits:
if failed_build_requests or missing:
for _, pkg_id, err in failed_build_requests:
tty.error(f"{pkg_id}: {err}")

for _, pkg_id in missing:
tty.error(f"{pkg_id}: Package was not installed")

if len(failed_explicits) > 0:
pkg = failed_explicits[0][0]
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
if len(failed_build_requests) > 0:
pkg = failed_build_requests[0][0]
ids = [pkg_id for _, pkg_id, _ in failed_build_requests]
tty.debug(
"Associating installation failure with first failed "
f"explicit package ({ids[0]}) from {', '.join(ids)}"
@@ -2295,7 +2261,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.verbose = bool(install_args.get("verbose", False))

# whether installation was explicitly requested by the user
self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)
self.explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])

# env before starting installation
self.unmodified_env = install_args.get("unmodified_env", {})
@@ -2380,9 +2346,7 @@ def _install_source(self) -> None:
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
tty.debug(f"{self.pre} Copying source to {src_target}")

fs.install_tree(
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(pkg.stage.source_path, src_target)

def _real_install(self) -> None:
import spack.builder
@@ -3,22 +3,12 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from ._operating_system import OperatingSystem
from .cray_backend import CrayBackend
from .cray_frontend import CrayFrontend
from .freebsd import FreeBSDOs
from .linux_distro import LinuxDistro
from .mac_os import MacOs
from .windows_os import WindowsOs

__all__ = [
"OperatingSystem",
"LinuxDistro",
"MacOs",
"CrayFrontend",
"CrayBackend",
"WindowsOs",
"FreeBSDOs",
]
__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "WindowsOs", "FreeBSDOs"]

#: List of all the Operating Systems known to Spack
operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs, FreeBSDOs]
operating_systems = [LinuxDistro, MacOs, WindowsOs, FreeBSDOs]
@@ -1,172 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

import llnl.util.tty as tty

import spack.error
import spack.version
from spack.util.module_cmd import module

from .linux_distro import LinuxDistro

#: Possible locations of the Cray CLE release file,
#: which we look at to get the CNL OS version.
_cle_release_file = "/etc/opt/cray/release/cle-release"
_clerelease_file = "/etc/opt/cray/release/clerelease"


def read_cle_release_file():
"""Read the CLE release file and return a dict with its attributes.

This file is present on newer versions of Cray.

The release file looks something like this::

RELEASE=6.0.UP07
BUILD=6.0.7424
...

The dictionary we produce looks like this::

{
"RELEASE": "6.0.UP07",
"BUILD": "6.0.7424",
...
}

Returns:
dict: dictionary of release attributes
"""
with open(_cle_release_file) as release_file:
result = {}
for line in release_file:
# use partition instead of split() to ensure we only split on
# the first '=' in the line.
key, _, value = line.partition("=")
result[key] = value.strip()
return result


def read_clerelease_file():
"""Read the CLE release file and return the Cray OS version.

This file is present on older versions of Cray.

The release file looks something like this::

5.2.UP04

Returns:
str: the Cray OS version
"""
with open(_clerelease_file) as release_file:
for line in release_file:
return line.strip()


class CrayBackend(LinuxDistro):
"""Compute Node Linux (CNL) is the operating system used for the Cray XC
series super computers. It is a very stripped down version of GNU/Linux.
Any compilers found through this operating system will be used with
modules. If updated, user must make sure that version and name are
updated to indicate that OS has been upgraded (or downgraded)
"""

def __init__(self):
name = "cnl"
version = self._detect_crayos_version()
if version:
# If we found a CrayOS version, we do not want the information
# from LinuxDistro. In order to skip the logic from
# distro.linux_distribution, while still calling __init__
# methods further up the MRO, we skip LinuxDistro in the MRO and
# call the OperatingSystem superclass __init__ method
super(LinuxDistro, self).__init__(name, version)
else:
super().__init__()
self.modulecmd = module

def __str__(self):
return self.name + str(self.version)

@classmethod
def _detect_crayos_version(cls):
if os.path.isfile(_cle_release_file):
release_attrs = read_cle_release_file()
if "RELEASE" not in release_attrs:
# This Cray system uses a base OS not CLE/CNL
return None
v = spack.version.Version(release_attrs["RELEASE"])
return v[0]
elif os.path.isfile(_clerelease_file):
v = read_clerelease_file()
return spack.version.Version(v)[0]
else:
# Not all Cray systems run CNL on the backend.
# Systems running in what Cray calls "cluster" mode run other
# linux OSs under the Cray PE.
# So if we don't detect any Cray OS version on the system,
# we return None. We can't ever be sure we will get a Cray OS
# version.
# Returning None allows the calling code to test for the value
# being "True-ish" rather than requiring a try/except block.
return None

def arguments_to_detect_version_fn(self, paths):
import spack.compilers

command_arguments = []
for compiler_name in spack.compilers.supported_compilers():
cmp_cls = spack.compilers.class_for_compiler_name(compiler_name)

# If the compiler doesn't have a corresponding
# Programming Environment, skip to the next
if cmp_cls.PrgEnv is None:
continue

if cmp_cls.PrgEnv_compiler is None:
tty.die("Must supply PrgEnv_compiler with PrgEnv")

compiler_id = spack.compilers.CompilerID(self, compiler_name, None)
detect_version_args = spack.compilers.DetectVersionArgs(
id=compiler_id, variation=(None, None), language="cc", path="cc"
)
command_arguments.append(detect_version_args)
return command_arguments

def detect_version(self, detect_version_args):
import spack.compilers

modulecmd = self.modulecmd
compiler_name = detect_version_args.id.compiler_name
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
output = modulecmd("avail", compiler_cls.PrgEnv_compiler)
version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
matches = re.findall(version_regex, output)
version = tuple(version for _, version in matches if "classic" not in version)
compiler_id = detect_version_args.id
value = detect_version_args._replace(id=compiler_id._replace(version=version))
return value, None

def make_compilers(self, compiler_id, paths):
import spack.spec

name = compiler_id.compiler_name
cmp_cls = spack.compilers.class_for_compiler_name(name)
compilers = []
for v in compiler_id.version:
comp = cmp_cls(
spack.spec.CompilerSpec(name + "@=" + v),
self,
"any",
["cc", "CC", "ftn"],
[cmp_cls.PrgEnv, name + "/" + v],
)

compilers.append(comp)
return compilers
@@ -1,105 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import contextlib
import os
import re

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty

from spack.util.environment import get_path
from spack.util.module_cmd import module

from .linux_distro import LinuxDistro


@contextlib.contextmanager
def unload_programming_environment():
"""Context manager that unloads Cray Programming Environments."""
env_bu = None

# We rely on the fact that the PrgEnv-* modules set the PE_ENV
# environment variable.
if "PE_ENV" in os.environ:
# Copy environment variables to restore them after the compiler
# detection. We expect that the only thing PrgEnv-* modules do is
# the environment variables modifications.
env_bu = os.environ.copy()

# Get the name of the module from the environment variable.
prg_env = "PrgEnv-" + os.environ["PE_ENV"].lower()

# Unload the PrgEnv-* module. By doing this we intentionally
# provoke errors when the Cray's compiler wrappers are executed
# (Error: A PrgEnv-* modulefile must be loaded.) so they will not
# be detected as valid compilers by the overridden method. We also
# expect that the modules that add the actual compilers' binaries
# into the PATH environment variable (i.e. the following modules:
# 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
# specified as prerequisites in the PrgEnv-* modulefiles.
module("unload", prg_env)

yield

# Restore the environment.
if env_bu is not None:
os.environ.clear()
os.environ.update(env_bu)


class CrayFrontend(LinuxDistro):
"""Represents OS that runs on login and service nodes of the Cray platform.
It acts as a regular Linux without Cray-specific modules and compiler
wrappers."""

@property
def compiler_search_paths(self):
"""Calls the default function but unloads Cray's programming
environments first.

This prevents from detecting Cray compiler wrappers and avoids
possible false detections.
"""
import spack.compilers

with unload_programming_environment():
search_paths = get_path("PATH")

extract_path_re = re.compile(r"prepend-path[\s]*PATH[\s]*([/\w\.:-]*)")

for compiler_cls in spack.compilers.all_compiler_types():
# Check if the compiler class is supported on Cray
prg_env = getattr(compiler_cls, "PrgEnv", None)
compiler_module = getattr(compiler_cls, "PrgEnv_compiler", None)
if not (prg_env and compiler_module):
continue

# It is supported, check which versions are available
output = module("avail", compiler_cls.PrgEnv_compiler)
version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
matches = re.findall(version_regex, output)
versions = tuple(version for _, version in matches if "classic" not in version)

# Now inspect the modules and add to paths
msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
tty.debug(msg.format(compiler_module, versions))
for v in versions:
try:
current_module = compiler_module + "/" + v
out = module("show", current_module)
match = extract_path_re.search(out)
search_paths += match.group(1).split(":")
except Exception as e:
msg = (
"[CRAY FE] An unexpected error occurred while "
"detecting FE compiler [compiler={0}, "
" version={1}, error={2}]"
)
tty.debug(msg.format(compiler_cls.name, v, str(e)))

search_paths = list(llnl.util.lang.dedupe(search_paths))
return fs.search_paths_for_executables(*search_paths)
@@ -199,10 +199,10 @@ def __init__(cls, name, bases, attr_dict):
# assumed to be detectable
if hasattr(cls, "executables") or hasattr(cls, "libraries"):
# Append a tag to each detectable package, so that finding them is faster
if not hasattr(cls, "tags"):
if hasattr(cls, "tags"):
getattr(cls, "tags").append(DetectablePackageMeta.TAG)
else:
setattr(cls, "tags", [DetectablePackageMeta.TAG])
elif DetectablePackageMeta.TAG not in cls.tags:
cls.tags.append(DetectablePackageMeta.TAG)

@classmethod
def platform_executables(cls):
@@ -621,10 +621,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
#: By default do not run tests within package's install()
run_tests = False

#: Keep -Werror flags, matches config:flags:keep_werror to override config
# NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
keep_werror: Optional[str] = None

#: Most packages are NOT extendable. Set to True if you want extensions.
extendable = False

@@ -930,6 +926,32 @@ def global_license_file(self):
self.global_license_dir, self.name, os.path.basename(self.license_files[0])
)

# NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
# Python 3.8+, but we still support 3.6.
@property
def keep_werror(self) -> Optional[str]:
"""Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.

Valid return values are:
* ``"all"``: keep all ``-Werror`` flags.
* ``"specific"``: keep only ``-Werror=specific-warning`` flags.
* ``"none"``: filter out all ``-Werror*`` flags.
* ``None``: respect the user's configuration (``"none"`` by default).
"""
if self.spec.satisfies("%nvhpc@:23.3") or self.spec.satisfies("%pgi"):
# Filtering works by replacing -Werror with -Wno-error, but older nvhpc and
# PGI do not understand -Wno-error, so we disable filtering.
return "all"

elif self.spec.satisfies("%nvhpc@23.4:"):
# newer nvhpc supports -Wno-error but can't disable specific warnings with
# -Wno-error=warning. Skip -Werror=warning, but still filter -Werror.
return "specific"

else:
# use -Werror disablement by default for other compilers
return None

@property
def version(self):
if not self.spec.versions.concrete:

@@ -1876,7 +1898,10 @@ def do_install(self, **kwargs):
verbose (bool): Display verbose build output (by default,
suppresses it)
"""
PackageInstaller([(self, kwargs)]).install()
explicit = kwargs.get("explicit", True)
if isinstance(explicit, bool):
kwargs["explicit"] = {self.spec.dag_hash()} if explicit else set()
PackageInstaller([self], kwargs).install()

# TODO (post-34236): Update tests and all packages that use this as a
# TODO (post-34236): package method to the routine made available to
@@ -6,7 +6,6 @@

from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
from ._platform import Platform
from .cray import Cray
from .darwin import Darwin
from .freebsd import FreeBSD
from .linux import Linux

@@ -15,7 +14,6 @@

__all__ = [
"Platform",
"Cray",
"Darwin",
"Linux",
"FreeBSD",

@@ -8,7 +8,6 @@

import spack.util.environment

from .cray import Cray
from .darwin import Darwin
from .freebsd import FreeBSD
from .linux import Linux

@@ -16,7 +15,7 @@
from .windows import Windows

#: List of all the platform classes known to Spack
platforms = [Cray, Darwin, Linux, Windows, FreeBSD, Test]
platforms = [Darwin, Linux, Windows, FreeBSD, Test]


@llnl.util.lang.memoized
@@ -2,254 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import platform
import re

import archspec.cpu

import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.target
import spack.version
from spack.operating_systems.cray_backend import CrayBackend
from spack.operating_systems.cray_frontend import CrayFrontend
from spack.paths import build_env_path
from spack.util.executable import Executable
from spack.util.module_cmd import module

from ._platform import NoPlatformError, Platform

_craype_name_to_target_name = {
"x86-cascadelake": "cascadelake",
"x86-naples": "zen",
"x86-rome": "zen2",
"x86-milan": "zen3",
"x86-skylake": "skylake_avx512",
"mic-knl": "mic_knl",
"interlagos": "bulldozer",
"abudhabi": "piledriver",
}

_ex_craype_dir = "/opt/cray/pe/cpe"
_xc_craype_dir = "/opt/cray/pe/cdt"


def slingshot_network():
return os.path.exists("/opt/cray/pe") and (
os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
)


def _target_name_from_craype_target_name(name):
return _craype_name_to_target_name.get(name, name)


class Cray(Platform):
priority = 10

def __init__(self):
"""Create a Cray system platform.

Target names should use craype target names but not include the
'craype-' prefix. Uses first viable target from:
self
envars [SPACK_FRONT_END, SPACK_BACK_END]
configuration file "targets.yaml" with keys 'front_end', 'back_end'
scanning /etc/bash/bashrc.local for back_end only
"""
super().__init__("cray")

# Make all craype targets available.
for target in self._avail_targets():
name = _target_name_from_craype_target_name(target)
self.add_target(name, spack.target.Target(name, "craype-%s" % target))

self.back_end = os.environ.get("SPACK_BACK_END", self._default_target_from_env())
self.default = self.back_end
if self.back_end not in self.targets:
# We didn't find a target module for the backend
raise NoPlatformError()

# Setup frontend targets
for name in archspec.cpu.TARGETS:
if name not in self.targets:
self.add_target(name, spack.target.Target(name))
self.front_end = os.environ.get("SPACK_FRONT_END", archspec.cpu.host().name)
if self.front_end not in self.targets:
self.add_target(self.front_end, spack.target.Target(self.front_end))

front_distro = CrayFrontend()
back_distro = CrayBackend()

self.default_os = str(back_distro)
self.back_os = self.default_os
self.front_os = str(front_distro)

self.add_operating_system(self.back_os, back_distro)
if self.front_os != self.back_os:
self.add_operating_system(self.front_os, front_distro)

def setup_platform_environment(self, pkg, env):
"""Change the linker to default dynamic to be more
similar to linux/standard linker behavior
"""
# Unload these modules to prevent any silent linking or unnecessary
# I/O profiling in the case of darshan.
modules_to_unload = ["cray-mpich", "darshan", "cray-libsci", "altd"]
for mod in modules_to_unload:
module("unload", mod)

env.set("CRAYPE_LINK_TYPE", "dynamic")
cray_wrapper_names = os.path.join(build_env_path, "cray")

if os.path.isdir(cray_wrapper_names):
env.prepend_path("PATH", cray_wrapper_names)
env.prepend_path("SPACK_ENV_PATH", cray_wrapper_names)

# Makes spack installed pkg-config work on Crays
env.append_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig")
env.append_path("PKG_CONFIG_PATH", "/usr/local/lib64/pkgconfig")

# CRAY_LD_LIBRARY_PATH is used at build time by the cray compiler
# wrappers to augment LD_LIBRARY_PATH. This is to avoid long load
# times at runtime. This behavior is not always respected on cray
# "cluster" systems, so we reproduce it here.
if os.environ.get("CRAY_LD_LIBRARY_PATH"):
env.prepend_path("LD_LIBRARY_PATH", os.environ["CRAY_LD_LIBRARY_PATH"])

@classmethod
def craype_type_and_version(cls):
if os.path.isdir(_ex_craype_dir):
craype_dir = _ex_craype_dir
craype_type = "EX"
elif os.path.isdir(_xc_craype_dir):
craype_dir = _xc_craype_dir
craype_type = "XC"
else:
return (None, None)

# Take the default version from known symlink path
default_path = os.path.join(craype_dir, "default")
if os.path.islink(default_path):
version = spack.version.Version(readlink(default_path))
return (craype_type, version)

# If no default version, sort available versions and return latest
versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
versions_available.sort(reverse=True)
if not versions_available:
return (craype_type, None)
return (craype_type, versions_available[0])

@classmethod
def detect(cls):
"""
Detect whether this system requires CrayPE module support.

Systems with newer CrayPE (21.10 for EX systems, future work for CS and
XC systems) have compilers and MPI wrappers that can be used directly
by path. These systems are considered ``linux`` platforms.

For systems running an older CrayPE, we detect the Cray platform based
on the availability through `module` of the Cray programming
environment. If this environment is available, we can use it to find
compilers, target modules, etc. If the Cray programming environment is
not available via modules, then we will treat it as a standard linux
system, as the Cray compiler wrappers and other components of the Cray
programming environment are irrelevant without module support.
"""
if "opt/cray" not in os.environ.get("MODULEPATH", ""):
return False

craype_type, craype_version = cls.craype_type_and_version()
if craype_type == "XC":
return True
if craype_type == "EX" and craype_version < spack.version.Version("21.10"):
return True
return False

def _default_target_from_env(self):
"""Set and return the default CrayPE target loaded in a clean login
session.

A bash subshell is launched with a wiped environment and the list of
loaded modules is parsed for the first acceptable CrayPE target.
"""
# env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
if getattr(self, "default", None) is None:
bash = Executable("/bin/bash")
output = bash(
"--norc",
"--noprofile",
"-lc",
"echo $CRAY_CPU_TARGET",
env={"TERM": os.environ.get("TERM", "")},
output=str,
error=os.devnull,
)

default_from_module = "".join(output.split()) # rm all whitespace
if default_from_module:
tty.debug("Found default module:%s" % default_from_module)
return default_from_module
else:
front_end = archspec.cpu.host()
# Look for the frontend architecture or closest ancestor
# available in cray target modules
avail = [_target_name_from_craype_target_name(x) for x in self._avail_targets()]
for front_end_possibility in [front_end] + front_end.ancestors:
if front_end_possibility.name in avail:
tty.debug("using front-end architecture or available ancestor")
return front_end_possibility.name
else:
tty.debug("using platform.machine as default")
return platform.machine()

def _avail_targets(self):
"""Return a list of available CrayPE CPU targets."""

def modules_in_output(output):
"""Returns a list of valid modules parsed from modulecmd output"""
return [i for i in re.split(r"\s\s+|\n", output)]

def target_names_from_modules(modules):
# Craype- module prefixes that are not valid CPU targets.
targets = []
for mod in modules:
if "craype-" in mod:
name = mod[7:]
name = name.split()[0]
_n = name.replace("-", "_") # test for mic-knl/mic_knl
is_target_name = name in archspec.cpu.TARGETS or _n in archspec.cpu.TARGETS
is_cray_target_name = name in _craype_name_to_target_name
if is_target_name or is_cray_target_name:
targets.append(name)

return targets

def modules_from_listdir():
craype_default_path = "/opt/cray/pe/craype/default/modulefiles"
if os.path.isdir(craype_default_path):
return os.listdir(craype_default_path)
return []

if getattr(self, "_craype_targets", None) is None:
strategies = [
lambda: modules_in_output(module("avail", "-t", "craype-")),
modules_from_listdir,
]
for available_craype_modules in strategies:
craype_modules = available_craype_modules()
craype_targets = target_names_from_modules(craype_modules)
if craype_targets:
self._craype_targets = craype_targets
break
else:
# If nothing is found add platform.machine()
# to avoid Spack erroring out
self._craype_targets = [platform.machine()]

return self._craype_targets
@@ -13,7 +13,6 @@
r"\w[\w-]*": {
"type": "object",
"additionalProperties": False,
"required": ["spec"],
"properties": {"spec": {"type": "string"}, "path": {"type": "string"}},
}
},
@@ -116,6 +116,8 @@ class Provenance(enum.IntEnum):
|
||||
PACKAGE_PY = enum.auto()
|
||||
# An installed spec
|
||||
INSTALLED = enum.auto()
|
||||
# lower provenance for installed git refs so concretizer prefers StandardVersion installs
|
||||
INSTALLED_GIT_VERSION = enum.auto()
|
||||
# A runtime injected from another package (e.g. a compiler)
|
||||
RUNTIME = enum.auto()
|
||||
|
||||
@@ -1433,14 +1435,16 @@ def condition(
|
||||
# caller, we won't emit partial facts.
|
||||
|
||||
condition_id = next(self._id_counter)
|
||||
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
|
||||
trigger_id = self._get_condition_id(
|
||||
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
|
||||
)
|
||||
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
|
||||
)
|
||||
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
@@ -1689,43 +1693,19 @@ def external_packages(self):
|
||||
spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
|
||||
]
|
||||
|
||||
selected_externals = set()
|
||||
external_specs = []
|
||||
if spec_filters:
|
||||
for current_filter in spec_filters:
|
||||
current_filter.factory = lambda: candidate_specs
|
||||
selected_externals.update(current_filter.selected_specs())
|
||||
|
||||
# Emit facts for externals specs. Note that "local_idx" is the index of the spec
|
||||
# in packages:<pkg_name>:externals. This means:
|
||||
#
|
||||
# packages:<pkg_name>:externals[local_idx].spec == spec
|
||||
external_versions = []
|
||||
for local_idx, spec in enumerate(candidate_specs):
|
||||
msg = f"{spec.name} available as external when satisfying {spec}"
|
||||
|
||||
if spec_filters and spec not in selected_externals:
|
||||
continue
|
||||
|
||||
if not spec.versions.concrete:
|
||||
warnings.warn(f"cannot use the external spec {spec}: needs a concrete version")
|
||||
continue
|
||||
|
||||
def external_imposition(input_spec, requirements):
|
||||
return requirements + [
|
||||
fn.attr("external_conditions_hold", input_spec.name, local_idx)
|
||||
]
|
||||
|
||||
try:
|
||||
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
|
||||
except (spack.error.SpecError, RuntimeError) as e:
|
||||
warnings.warn(f"while setting up external spec {spec}: {e}")
|
||||
continue
|
||||
external_versions.append((spec.version, local_idx))
|
||||
self.possible_versions[spec.name].add(spec.version)
|
||||
self.gen.newline()
|
||||
external_specs.extend(current_filter.selected_specs())
|
||||
else:
|
||||
external_specs.extend(candidate_specs)
|
||||
|
||||
# Order the external versions to prefer more recent versions
|
||||
# even if specs in packages.yaml are not ordered that way
|
||||
external_versions = [
|
||||
(x.version, external_id) for external_id, x in enumerate(external_specs)
|
||||
]
|
||||
external_versions = [
|
||||
(v, idx, external_id)
|
||||
for idx, (v, external_id) in enumerate(sorted(external_versions, reverse=True))
|
||||
@@ -1735,6 +1715,19 @@ def external_imposition(input_spec, requirements):
|
||||
DeclaredVersion(version=version, idx=idx, origin=Provenance.EXTERNAL)
|
||||
)
|
||||
|
||||
# Declare external conditions with a local index into packages.yaml
|
||||
for local_idx, spec in enumerate(external_specs):
|
||||
msg = "%s available as external when satisfying %s" % (spec.name, spec)
|
||||
|
||||
def external_imposition(input_spec, requirements):
|
||||
return requirements + [
|
||||
fn.attr("external_conditions_hold", input_spec.name, local_idx)
|
||||
]
|
||||
|
||||
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
|
||||
self.possible_versions[spec.name].add(spec.version)
|
||||
self.gen.newline()
|
||||
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
@@ -1921,9 +1914,12 @@ def _spec_clauses(
|
||||
for flag_type, flags in spec.compiler_flags.items():
|
||||
for flag in flags:
|
||||
clauses.append(f.node_flag(spec.name, flag_type, flag))
|
||||
clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
|
||||
if not spec.concrete and flag.propagate is True:
|
||||
clauses.append(f.node_flag_propagate(spec.name, flag_type))
|
||||
clauses.append(
|
||||
f.propagate(
|
||||
spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
|
||||
)
|
||||
)
|
||||
|
||||
# dependencies
|
||||
if spec.concrete:
|
||||
@@ -2076,7 +2072,7 @@ def define_ad_hoc_versions_from_specs(
|
||||
# best possible, so they're guaranteed to be used preferentially.
|
||||
version = s.versions.concrete
|
||||
|
||||
if version is None or any(v == version for v in self.possible_versions[s.name]):
|
||||
if version is None or (any((v == version) for v in self.possible_versions[s.name])):
|
||||
continue
|
||||
|
||||
if require_checksum and not _is_checksummed_git_version(version):
|
||||
@@ -2390,9 +2386,16 @@ def concrete_specs(self):
            # - Add OS to possible OS's
            for dep in spec.traverse():
                self.possible_versions[dep.name].add(dep.version)
                self.declared_versions[dep.name].append(
                    DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
                )
                if isinstance(dep.version, vn.GitVersion):
                    self.declared_versions[dep.name].append(
                        DeclaredVersion(
                            version=dep.version, idx=0, origin=Provenance.INSTALLED_GIT_VERSION
                        )
                    )
                else:
                    self.declared_versions[dep.name].append(
                        DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
                    )
                self.possible_oses.add(dep.os)

    def define_concrete_input_specs(self, specs, possible):
@@ -2444,7 +2447,7 @@ def setup(

        if using_libc_compatibility():
            for libc in self.libcs:
                self.gen.fact(fn.allowed_libc(libc.name, libc.version))
            self.gen.fact(fn.host_libc(libc.name, libc.version))

        if not allow_deprecated:
            self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2741,8 +2744,6 @@ class _Head:
    node_compiler = fn.attr("node_compiler_set")
    node_compiler_version = fn.attr("node_compiler_version_set")
    node_flag = fn.attr("node_flag_set")
    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    propagate = fn.attr("propagate")


@@ -2758,8 +2759,6 @@ class _Body:
    node_compiler = fn.attr("node_compiler")
    node_compiler_version = fn.attr("node_compiler_version")
    node_flag = fn.attr("node_flag")
    node_flag_source = fn.attr("node_flag_source")
    node_flag_propagate = fn.attr("node_flag_propagate")
    propagate = fn.attr("propagate")
@@ -3346,6 +3345,8 @@ def hash(self, node, h):
    def node(self, node):
        if node not in self._specs:
            self._specs[node] = spack.spec.Spec(node.pkg)
            for flag_type in spack.spec.FlagMap.valid_compiler_flags():
                self._specs[node].compiler_flags[flag_type] = []

    def _arch(self, node):
        arch = self._specs[node].architecture

@@ -3398,9 +3399,6 @@ def node_flag(self, node, flag_type, flag):
    def node_flag_source(self, node, flag_type, source):
        self._flag_sources[(node, flag_type)].add(source)

    def no_flags(self, node, flag_type):
        self._specs[node].compiler_flags[flag_type] = []

    def external_spec_selected(self, node, idx):
        """This means that the external spec and index idx has been selected for this package."""
        packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
@@ -3493,7 +3491,7 @@ def reorder_flags(self):
            ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
            compiler_flags = spec.compiler_flags.get(flag_type, [])

            msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
            msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
            assert set(compiler_flags) == set(ordered_compiler_flags), msg

            spec.compiler_flags.update({flag_type: ordered_compiler_flags})
@@ -3563,9 +3561,8 @@ def build_specs(self, function_tuples):
            # do not bother calling actions on it except for node_flag_source,
            # since node_flag_source is tracking information not in the spec itself
            spec = self._specs.get(args[0])
            if spec and spec.concrete:
                if name != "node_flag_source":
                    continue
            if spec and spec.concrete and name != "node_flag_source":
                continue

            action(*args)
@@ -3836,12 +3833,6 @@ class Solver:
    def __init__(self):
        self.driver = PyclingoDriver()
        self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)
        if spack.platforms.host().name == "cray":
            msg = (
                "The Cray platform, i.e. 'platform=cray', will be removed in Spack v0.23. "
                "All Cray machines will be then detected as 'platform=linux'."
            )
            warnings.warn(msg)

    @staticmethod
    def _check_input_and_extract_concrete_specs(specs):
@@ -29,7 +29,6 @@
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
:- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).

@@ -256,6 +255,7 @@ possible_version_weight(node(ID, Package), Weight)
:- attr("version", node(ID, Package), Version),
   version_weight(node(ID, Package), Weight),
   not pkg_fact(Package, version_declared(Version, Weight, "installed")),
   not pkg_fact(Package, version_declared(Version, Weight, "installed_git_version")),
   not build(node(ID, Package)),
   internal_error("Build version weight used for reused package").
@@ -964,12 +964,19 @@ pkg_fact(Package, variant_single_value("dev_path"))

% Propagation roots have a corresponding attr("propagate", ...)
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).

% Propagate an attribute along edges to child nodes
propagate(ChildNode, PropagatedAttribute) :-
    propagate(ParentNode, PropagatedAttribute),
    depends_on(ParentNode, ChildNode).

propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
    propagate(ParentNode, PropagatedAttribute, edge_types(DepType1, DepType2)),
    depends_on(ParentNode, ChildNode),
    1 { attr("depends_on", ParentNode, ChildNode, DepType1); attr("depends_on", ParentNode, ChildNode, DepType2) }.
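As a rough mental model, the two propagate rules amount to a transitive closure over the dependency DAG, optionally restricted to edges carrying one of two dependency types. An illustrative Python paraphrase (not part of the diff; names and the dict-based DAG are assumptions):

    from collections import deque

    def propagate(roots, depends_on, edge_types=None):
        # roots: nodes carrying attr("propagate", ...); depends_on: dict mapping
        # a node to a list of (child, deptype) edges; edge_types: optional pair
        # like ("link", "run"). Returns every node the attribute reaches.
        reached, queue = set(roots), deque(roots)
        while queue:
            parent = queue.popleft()
            for child, deptype in depends_on.get(parent, ()):
                # The edge_types(DepType1, DepType2) rule only follows edges
                # whose type matches one of the two allowed types.
                if edge_types is not None and deptype not in edge_types:
                    continue
                if child not in reached:
                    reached.add(child)
                    queue.append(child)
        return reached

    dag = {"root": [("mpi", "link"), ("cmake", "build")], "mpi": [("libc", "link")]}
    assert propagate({"root"}, dag, edge_types=("link", "run")) == {"root", "mpi", "libc"}
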
%-----------------------------------------------------------------------------
% Activation of propagated values
%-----------------------------------------------------------------------------
@@ -995,6 +1002,33 @@ variant_is_propagated(PackageNode, Variant) :-
    attr("variant_value", PackageNode, Variant, Value),
    not propagate(PackageNode, variant_value(Variant, Value)).

%----
% Flags
%----

% A propagated flag implies:
% 1. The same flag type is not set on this node
% 2. This node has the same compiler as the propagation source

propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
    propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
    not attr("node_flag_set", node(PackageID, Package), FlagType, _),
    % Same compiler as propagation source
    node_compiler(node(PackageID, Package), CompilerID),
    node_compiler(SourceNode, CompilerID),
    attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
    node(PackageID, Package) != SourceNode,
    not runtime(Package).

attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).

% Cannot propagate the same flag from two distinct sources
error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
    propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
    propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
    Source1 < Source2.

%----
% Compiler constraints
%----
@@ -1128,11 +1162,8 @@ target_weight(Target, 0)
node_target_weight(PackageNode, MinWeight)
    :- attr("node", PackageNode),
       attr("node_target", PackageNode, Target),
       target(Target),
       MinWeight = #min { Weight : target_weight(Target, Weight) }.

:- attr("node_target", PackageNode, Target), not node_target_weight(PackageNode, _).

% compatibility rules for targets among nodes
node_target_match(ParentNode, DependencyNode)
    :- attr("depends_on", ParentNode, DependencyNode, Type), Type != "build",
@@ -1279,45 +1310,9 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing
% Compiler flags
%-----------------------------------------------------------------------------

% propagate flags when compiler match
can_inherit_flags(PackageNode, DependencyNode, FlagType)
    :- same_compiler(PackageNode, DependencyNode),
       not attr("node_flag_set", DependencyNode, FlagType, _),
       flag_type(FlagType).

same_compiler(PackageNode, DependencyNode)
    :- depends_on(PackageNode, DependencyNode),
       node_compiler(PackageNode, CompilerID),
       node_compiler(DependencyNode, CompilerID),
       compiler_id(CompilerID).

node_flag_inherited(DependencyNode, FlagType, Flag)
    :- attr("node_flag_set", PackageNode, FlagType, Flag),
       can_inherit_flags(PackageNode, DependencyNode, FlagType),
       attr("node_flag_propagate", PackageNode, FlagType).

% Ensure propagation
:- node_flag_inherited(PackageNode, FlagType, Flag),
   can_inherit_flags(PackageNode, DependencyNode, FlagType),
   attr("node_flag_propagate", PackageNode, FlagType).

error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
    depends_on(Source1, Package),
    depends_on(Source2, Package),
    attr("node_flag_propagate", Source1, FlagType),
    attr("node_flag_propagate", Source2, FlagType),
    can_inherit_flags(Source1, Package, FlagType),
    can_inherit_flags(Source2, Package, FlagType),
    Source1 < Source2.

% remember where flags came from
attr("node_flag_source", PackageNode, FlagType, PackageNode)
    :- attr("node_flag_set", PackageNode, FlagType, _).

attr("node_flag_source", DependencyNode, FlagType, Q)
    :- attr("node_flag_source", PackageNode, FlagType, Q),
       node_flag_inherited(DependencyNode, FlagType, _),
       attr("node_flag_propagate", PackageNode, FlagType).
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).

% compiler flags from compilers.yaml are put on nodes if compiler matches
attr("node_flag", PackageNode, FlagType, Flag)

@@ -1337,15 +1332,8 @@ attr("node_flag_compiler_default", PackageNode)
    compiler_name(CompilerID, CompilerName),
    compiler_version(CompilerID, Version).

% if a flag is set to something or inherited, it's included
% Flag set to something
attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
attr("node_flag", PackageNode, FlagType, Flag) :- node_flag_inherited(PackageNode, FlagType, Flag).

% if no node flags are set for a type, there are no flags.
attr("no_flags", PackageNode, FlagType)
    :- not attr("node_flag", PackageNode, FlagType, _),
       attr("node", PackageNode),
       flag_type(FlagType).

#defined compiler_flag/3.
@@ -21,12 +21,20 @@ error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage)
% A libc is needed in the DAG
:- has_built_packages(), not provider(_, node(0, "libc")).

% The libc must be chosen among available ones
% Non-libc reused specs must be host libc compatible. In case we build packages, we get a
% host compatible libc provider from other rules. If nothing is built, there is no libc provider,
% since it's pruned from reusable specs, meaning we have to explicitly impose reused specs are host
% compatible.
:- attr("hash", node(R, ReusedPackage), Hash),
   not provider(node(R, ReusedPackage), node(0, "libc")),
   not attr("compatible_libc", node(R, ReusedPackage), _, _).

% The libc provider must be one that a compiler can target
:- has_built_packages(),
   provider(node(X, LibcPackage), node(0, "libc")),
   attr("node", node(X, LibcPackage)),
   attr("version", node(X, LibcPackage), LibcVersion),
   not allowed_libc(LibcPackage, LibcVersion).
   not host_libc(LibcPackage, LibcVersion).

% A built node must depend on libc
:- build(PackageNode),
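Read operationally, the two integrity constraints above say: a reused spec that is not itself the libc provider must carry a known-compatible libc attribute, and the chosen libc provider must be one the host toolchain can target. A hypothetical Python paraphrase (illustrative only; the dict-based spec records are an assumption, not solver code):

    def check_libc_constraints(reused_specs, libc_providers, allowed_libcs):
        # Mirror of the two ":-" constraints: collect violations instead of
        # making the answer set unsatisfiable.
        problems = []
        for spec in reused_specs:
            if spec not in libc_providers and not spec.get("compatible_libc"):
                problems.append(f"{spec['name']}: libc compatibility unknown")
        for libc in libc_providers:
            if (libc["name"], libc["version"]) not in allowed_libcs:
                problems.append(f"{libc['name']}@{libc['version']}: not targetable")
        return problems
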

@@ -12,7 +12,6 @@
%=============================================================================

% macOS
os_compatible("sequoia", "sonoma").
os_compatible("sonoma", "ventura").
os_compatible("ventura", "monterey").
os_compatible("monterey", "bigsur").
@@ -1287,6 +1287,102 @@ def copy(self, *args, **kwargs):
        return self.wrapped_obj.copy(*args, **kwargs)


def tree(
    specs: List["spack.spec.Spec"],
    *,
    color: Optional[bool] = None,
    depth: bool = False,
    hashes: bool = False,
    hashlen: Optional[int] = None,
    cover: str = "nodes",
    indent: int = 0,
    format: str = DEFAULT_FORMAT,
    deptypes: Union[Tuple[str, ...], str] = "all",
    show_types: bool = False,
    depth_first: bool = False,
    recurse_dependencies: bool = True,
    status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
    prefix: Optional[Callable[["Spec"], str]] = None,
    key=id,
) -> str:
    """Prints out specs and their dependencies, tree-formatted with indentation.

    Status function may either output a boolean or an InstallStatus

    Args:
        color: if True, always colorize the tree. If False, don't colorize the tree. If None,
            use the default from llnl.tty.color
        depth: print the depth from the root
        hashes: if True, print the hash of each node
        hashlen: length of the hash to be printed
        cover: either "nodes" or "edges"
        indent: extra indentation for the tree being printed
        format: format to be used to print each node
        deptypes: dependency types to be represented in the tree
        show_types: if True, show the (merged) dependency type of a node
        depth_first: if True, traverse the DAG depth first when representing it as a tree
        recurse_dependencies: if True, recurse on dependencies
        status_fn: optional callable that takes a node as an argument and return its
            installation status
        prefix: optional callable that takes a node as an argument and return its
            installation prefix
    """
    out = ""

    if color is None:
        color = clr.get_color_when()

    for d, dep_spec in traverse.traverse_tree(
        sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
    ):
        node = dep_spec.spec

        if prefix is not None:
            out += prefix(node)
        out += " " * indent

        if depth:
            out += "%-4d" % d

        if status_fn:
            status = status_fn(node)
            if status in list(InstallStatus):
                out += clr.colorize(status.value, color=color)
            elif status:
                out += clr.colorize("@g{[+]}  ", color=color)
            else:
                out += clr.colorize("@r{[-]}  ", color=color)

        if hashes:
            out += clr.colorize("@K{%s}  ", color=color) % node.dag_hash(hashlen)

        if show_types:
            if cover == "nodes":
                # when only covering nodes, we merge dependency types
                # from all dependents before showing them.
                depflag = 0
                for ds in node.edges_from_dependents():
                    depflag |= ds.depflag
            else:
                # when covering edges or paths, we show dependency
                # types only for the edge through which we visited
                depflag = dep_spec.depflag

            type_chars = dt.flag_to_chars(depflag)
            out += "[%s]  " % type_chars

        out += "    " * d
        if d > 0:
            out += "^"
        out += node.format(format, color=color) + "\n"

        # Check if we wanted just the first line
        if not recurse_dependencies:
            break

    return out
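A possible usage sketch for this new module-level helper (hedged: the spec names are illustrative, and this assumes the function is reachable as spack.spec.tree in a session with a configured repo):

    # Hypothetical interactive use of the multi-spec tree() helper added above:
    # format several root specs as one indented tree with depths and short hashes.
    import spack.spec

    roots = [spack.spec.Spec("zlib").concretized(), spack.spec.Spec("cmake").concretized()]
    print(spack.spec.tree(roots, depth=True, hashes=True, hashlen=7))
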

@lang.lazy_lexicographic_ordering(set_hash=False)
class Spec:
    #: Cache for spec's prefix, computed lazily in the corresponding property
@@ -2816,9 +2912,7 @@ def _old_concretize(self, tests=False, deprecation_warning=True):

        # Check if we can produce an optimized binary (will throw if
        # there are declared inconsistencies)
        # No need on platform=cray because of the targeting modules
        if not self.satisfies("platform=cray"):
            self.architecture.target.optimization_flags(self.compiler)
        self.architecture.target.optimization_flags(self.compiler)

    def _patches_assigned(self):
        """Whether patches have been assigned to this spec by the concretizer."""
@@ -4164,21 +4258,29 @@ def __getitem__(self, name: str):
            csv = query_parameters.pop().strip()
            query_parameters = re.split(r"\s*,\s*", csv)

        # In some cases a package appears multiple times in the same DAG for *distinct*
        # specs. For example, a build-type dependency may itself depend on a package
        # the current spec depends on, but their specs may differ. Therefore we iterate
        # in an order here that prioritizes the build, test and runtime dependencies;
        # only when we don't find the package do we consider the full DAG.
        order = lambda: itertools.chain(
            self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
            self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
            self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
            self.traverse(deptype="link"),
            self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
            self.traverse(),  # fall back to a full search
        )

        # Consider runtime dependencies and direct build/test deps before transitive dependencies,
        # and prefer matches closest to the root.
        try:
            child: Spec = next(
                e.spec
                for e in itertools.chain(
                    (e for e in order() if e.spec.name == name or name in e.virtuals),
                    # for historical reasons
                    (e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
                itertools.chain(
                    # Regular specs
                    (x for x in order() if x.name == name),
                    (
                        x
                        for x in order()
                        if (not x.virtual)
                        and any(name in edge.virtuals for edge in x.edges_from_dependents())
                    ),
                    (x for x in order() if (not x.virtual) and x.package.provides(name)),
                )
            )
        except StopIteration:
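The order() lambda above implements a tiered search: earlier iterators have higher priority, and next() returns the first hit. A generic, self-contained sketch of the same pattern (illustrative names, not Spack API):

    import itertools
    from collections import namedtuple

    Pkg = namedtuple("Pkg", "name")

    def first_match(name, tiers):
        # 'tiers' is a list of callables, each producing candidates in
        # decreasing priority; the generator stops at the first match and
        # raises StopIteration if nothing matches in any tier.
        return next(
            item
            for item in itertools.chain.from_iterable(tier() for tier in tiers)
            if item.name == name
        )

    tiers = [lambda: [Pkg("zlib")], lambda: [Pkg("cmake"), Pkg("mpi")]]
    assert first_match("mpi", tiers) == Pkg("mpi")
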
@@ -4420,12 +4522,9 @@ def format_attribute(match_object: Match) -> str:
            if part.startswith("_"):
                raise SpecFormatStringError("Attempted to format private attribute")
            else:
                if isinstance(current, vt.VariantMap):
                if part == "variants" and isinstance(current, vt.VariantMap):
                    # subscript instead of getattr for variant names
                    try:
                        current = current[part]
                    except KeyError:
                        raise SpecFormatStringError(f"Variant '{part}' does not exist")
                    current = current[part]
                else:
                    # aliases
                    if part == "arch":
@@ -4601,13 +4700,14 @@ def tree(
        recurse_dependencies: bool = True,
        status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
        prefix: Optional[Callable[["Spec"], str]] = None,
        key=id,
    ) -> str:
        """Prints out this spec and its dependencies, tree-formatted
        with indentation.
        """Prints out this spec and its dependencies, tree-formatted with indentation.

        Status function may either output a boolean or an InstallStatus
        See multi-spec ``spack.spec.tree()`` function for details.

        Args:
            specs: List of specs to format.
            color: if True, always colorize the tree. If False, don't colorize the tree. If None,
                use the default from llnl.tty.color
            depth: print the depth from the root

@@ -4625,60 +4725,23 @@ def tree(
            prefix: optional callable that takes a node as an argument and return its
                installation prefix
        """
        out = ""

        if color is None:
            color = clr.get_color_when()

        for d, dep_spec in traverse.traverse_tree(
            [self], cover=cover, deptype=deptypes, depth_first=depth_first
        ):
            node = dep_spec.spec

            if prefix is not None:
                out += prefix(node)
            out += " " * indent

            if depth:
                out += "%-4d" % d

            if status_fn:
                status = status_fn(node)
                if status in list(InstallStatus):
                    out += clr.colorize(status.value, color=color)
                elif status:
                    out += clr.colorize("@g{[+]}  ", color=color)
                else:
                    out += clr.colorize("@r{[-]}  ", color=color)

            if hashes:
                out += clr.colorize("@K{%s}  ", color=color) % node.dag_hash(hashlen)

            if show_types:
                if cover == "nodes":
                    # when only covering nodes, we merge dependency types
                    # from all dependents before showing them.
                    depflag = 0
                    for ds in node.edges_from_dependents():
                        depflag |= ds.depflag
                else:
                    # when covering edges or paths, we show dependency
                    # types only for the edge through which we visited
                    depflag = dep_spec.depflag

                type_chars = dt.flag_to_chars(depflag)
                out += "[%s]  " % type_chars

            out += "    " * d
            if d > 0:
                out += "^"
            out += node.format(format, color=color) + "\n"

            # Check if we wanted just the first line
            if not recurse_dependencies:
                break

        return out
        return tree(
            [self],
            color=color,
            depth=depth,
            hashes=hashes,
            hashlen=hashlen,
            cover=cover,
            indent=indent,
            format=format,
            deptypes=deptypes,
            show_types=show_types,
            depth_first=depth_first,
            recurse_dependencies=recurse_dependencies,
            status_fn=status_fn,
            prefix=prefix,
            key=key,
        )

    def __repr__(self):
        return str(self)
@@ -212,10 +212,7 @@ def _expand_matrix_constraints(matrix_config):
    results = []
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]

        # Resolve abstract hashes so we can exclude by their concrete properties
        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]
        flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:

@@ -231,7 +228,9 @@ def _expand_matrix_constraints(matrix_config):
            spack.variant.substitute_abstract_variants(test_spec)
        except spack.variant.UnknownVariantError:
            pass
        if any(test_spec.satisfies(x) for x in excludes):

        # Resolve abstract hashes for exclusion criteria
        if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
            continue

        if sigil:
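Conceptually, a spec matrix expands to the cross product of its rows, with each combination filtered against the excludes. A minimal, string-based sketch of that idea (illustrative only, not the Spack implementation):

    import itertools

    def expand_matrix(rows, excludes=frozenset()):
        # Each row is a list of constraints; yield every cross-product
        # combination that does not contain an excluded constraint.
        for combo in itertools.product(*rows):
            if any(c in excludes for c in combo):
                continue
            yield " ".join(combo)

    rows = [["zlib", "bzip2"], ["%gcc", "%clang"]]
    print(list(expand_matrix(rows, excludes={"%clang"})))
    # ['zlib %gcc', 'bzip2 %gcc']
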
@@ -371,6 +371,7 @@ def use_store(
    data.update(extra_data)

    # Swap the store with the one just constructed and return it
    ensure_singleton_created()
    spack.config.CONFIG.push_scope(
        spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
    )
@@ -2,16 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import sys

import pytest

import archspec.cpu

import llnl.util.filesystem as fs

import spack.compilers
import spack.concretize
import spack.operating_systems

@@ -25,9 +21,8 @@ def current_host_platform():
    """Return the platform of the current host as detected by the
    'platform' stdlib package.
    """
    if os.path.exists("/opt/cray/pe"):
        current_platform = spack.platforms.Cray()
    elif "Linux" in platform.system():
    current_platform = None
    if "Linux" in platform.system():
        current_platform = spack.platforms.Linux()
    elif "Darwin" in platform.system():
        current_platform = spack.platforms.Darwin()

@@ -218,34 +213,7 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint
    str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
    spec = Spec(
        f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
    )
    spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
    with spack.concretize.disable_compiler_existence_check():
        spec.concretize()
    assert spec.target == spec["pkg-b"].target == result


@pytest.mark.parametrize(
    "versions,default,expected",
    [
        (["21.11", "21.9"], "21.11", False),
        (["21.11", "21.9"], "21.9", True),
        (["21.11", "21.9"], None, False),
    ],
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cray does not use windows")
def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env):
    ex_path = str(tmpdir.join("fake_craype_dir"))
    fs.mkdirp(ex_path)

    with fs.working_dir(ex_path):
        for version in versions:
            fs.touch(version)
        if default:
            os.symlink(default, "default")

    monkeypatch.setattr(spack.platforms.cray, "_ex_craype_dir", ex_path)
    os.environ["MODULEPATH"] = "/opt/cray/pe"

    assert spack.platforms.cray.Cray.detect() == expected
    assert spec.target == spec["b"].target == result
@@ -19,6 +19,8 @@
    (["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # The package use a non existing variant in a depends_on directive
    (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has a GitHub pull request commit patch URL
    (["invalid-github-pull-commits-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has a GitHub patch URL without full_index=1
    (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has invalid GitLab patch URLs
@@ -228,25 +228,3 @@ def test_source_is_disabled(mutable_config):
    spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
    with pytest.raises(ValueError):
        spack.bootstrap.core.source_is_enabled_or_raise(conf)


@pytest.mark.regression("45247")
def test_use_store_does_not_try_writing_outside_root(tmp_path, monkeypatch, mutable_config):
    """Tests that when we use the 'use_store' context manager, there is no attempt at creating
    a Store outside the given root.
    """
    initial_store = mutable_config.get("config:install_tree:root")
    user_store = tmp_path / "store"

    fn = spack.store.Store.__init__

    def _checked_init(self, root, *args, **kwargs):
        fn(self, root, *args, **kwargs)
        assert self.root == str(user_store)

    monkeypatch.setattr(spack.store.Store, "__init__", _checked_init)

    spack.store.reinitialize()
    with spack.store.use_store(user_store):
        assert spack.config.CONFIG.get("config:install_tree:root") == str(user_store)
    assert spack.config.CONFIG.get("config:install_tree:root") == initial_store
@@ -457,14 +457,14 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
    # a foobar=bar (parallel = False)
    # |
    # b (parallel = True)
    s = default_mock_concretization("pkg-a foobar=bar")
    s = default_mock_concretization("a foobar=bar")

    spack.build_environment.set_package_py_globals(s.package, context=Context.BUILD)
    assert s["pkg-a"].package.module.make_jobs == 1
    assert s["a"].package.module.make_jobs == 1

    spack.build_environment.set_package_py_globals(s["pkg-b"].package, context=Context.BUILD)
    assert s["pkg-b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
        parallel=s["pkg-b"].package.parallel
    spack.build_environment.set_package_py_globals(s["b"].package, context=Context.BUILD)
    assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
        parallel=s["b"].package.parallel
    )
@@ -556,24 +556,6 @@ def test_build_jobs_defaults():
    )


def test_dirty_disable_module_unload(config, mock_packages, working_env, mock_module_cmd):
    """Test that on CRAY platform 'module unload' is not called if the 'dirty'
    option is on.
    """
    s = spack.spec.Spec("pkg-a").concretized()

    # If called with "dirty" we don't unload modules, so no calls to the
    # `module` function on Cray
    spack.build_environment.setup_package(s.package, dirty=True)
    assert not mock_module_cmd.calls

    # If called without "dirty" we unload modules on Cray
    spack.build_environment.setup_package(s.package, dirty=False)
    assert mock_module_cmd.calls
    assert any(("unload", "cray-libsci") == item[0] for item in mock_module_cmd.calls)
    assert any(("unload", "cray-mpich") == item[0] for item in mock_module_cmd.calls)


class TestModuleMonkeyPatcher:
    def test_getting_attributes(self, default_mock_concretization):
        s = default_mock_concretization("libelf")
@@ -97,7 +97,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
@pytest.mark.usefixtures("config", "mock_packages")
class TestAutotoolsPackage:
    def test_with_or_without(self, default_mock_concretization):
        s = default_mock_concretization("pkg-a")
        s = default_mock_concretization("a")
        options = s.package.with_or_without("foo")

        # Ensure that values that are not representing a feature

@@ -129,7 +129,7 @@ def activate(value):
        assert "--without-lorem-ipsum" in options

    def test_none_is_allowed(self, default_mock_concretization):
        s = default_mock_concretization("pkg-a foo=none")
        s = default_mock_concretization("a foo=none")
        options = s.package.with_or_without("foo")

        # Ensure that values that are not representing a feature
@@ -12,21 +12,21 @@

def test_build_task_errors(install_mockery):
    with pytest.raises(ValueError, match="must be a package"):
        inst.BuildTask("abc", None, False, 0, 0, 0, [])
        inst.BuildTask("abc", None, False, 0, 0, 0, set())

    spec = spack.spec.Spec("trivial-install-test-package")
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    with pytest.raises(ValueError, match="must have a concrete spec"):
        inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
        inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, set())

    spec.concretize()
    assert spec.concrete
    with pytest.raises(ValueError, match="must have a build request"):
        inst.BuildTask(spec.package, None, False, 0, 0, 0, [])
        inst.BuildTask(spec.package, None, False, 0, 0, 0, set())

    request = inst.BuildRequest(spec.package, {})
    with pytest.raises(inst.InstallError, match="Cannot create a build task"):
        inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, [])
        inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, set())


def test_build_task_basics(install_mockery):

@@ -36,8 +36,8 @@ def test_build_task_basics(install_mockery):

    # Ensure key properties match expectations
    request = inst.BuildRequest(spec.package, {})
    task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
    assert task.explicit  # package was "explicitly" requested
    task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
    assert not task.explicit
    assert task.priority == len(task.uninstalled_deps)
    assert task.key == (task.priority, task.sequence)

@@ -58,7 +58,7 @@ def test_build_task_strings(install_mockery):

    # Ensure key properties match expectations
    request = inst.BuildRequest(spec.package, {})
    task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
    task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())

    # Cover __repr__
    irep = task.__repr__()
@@ -828,14 +828,14 @@ def test_keep_and_replace(wrapper_environment):
        ),
        (
            "config:flags:keep_werror:specific",
            ["-Werror", "-Werror=specific", "-bah"],
            ["-Werror=specific", "-bah"],
            ["-Werror", "-Werror=specific", "-Werror-specific2", "-bah"],
            ["-Wno-error", "-Werror=specific", "-Werror-specific2", "-bah"],
            ["-Werror"],
        ),
        (
            "config:flags:keep_werror:none",
            ["-Werror", "-Werror=specific", "-bah"],
            ["-bah", "-Wno-error", "-Wno-error=specific"],
            ["-Wno-error", "-Wno-error=specific", "-bah"],
            ["-Werror", "-Werror=specific"],
        ),
        # check non-standard -Werror opts like -Werror-implicit-function-declaration

@@ -848,13 +848,13 @@ def test_keep_and_replace(wrapper_environment):
        (
            "config:flags:keep_werror:specific",
            ["-Werror", "-Werror-implicit-function-declaration", "-bah"],
            ["-Werror-implicit-function-declaration", "-bah", "-Wno-error"],
            ["-Wno-error", "-Werror-implicit-function-declaration", "-bah"],
            ["-Werror"],
        ),
        (
            "config:flags:keep_werror:none",
            ["-Werror", "-Werror-implicit-function-declaration", "-bah"],
            ["-bah", "-Wno-error=implicit-function-declaration"],
            ["-Wno-error", "-bah", "-Wno-error=implicit-function-declaration"],
            ["-Werror", "-Werror-implicit-function-declaration"],
        ),
    ],
@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import subprocess

@@ -11,15 +10,12 @@
import llnl.util.filesystem as fs

import spack.ci as ci
import spack.ci_needs_workaround as cinw
import spack.ci_optimization as ci_opt
import spack.config
import spack.environment as ev
import spack.error
import spack.paths as spack_paths
import spack.util.git
import spack.util.gpg
import spack.util.spack_yaml as syaml


@pytest.fixture

@@ -203,164 +199,6 @@ def __call__(self, *args, **kwargs):
    assert "Unable to merge {0}".format(c1) in err


@pytest.mark.parametrize("obj, proto", [({}, [])])
def test_ci_opt_argument_checking(obj, proto):
    """Check that matches() and subkeys() return False when `proto` is not a dict."""
    assert not ci_opt.matches(obj, proto)
    assert not ci_opt.subkeys(obj, proto)


@pytest.mark.parametrize("yaml", [{"extends": 1}])
def test_ci_opt_add_extends_non_sequence(yaml):
    """Check that add_extends() exits if 'extends' is not a sequence."""
    yaml_copy = yaml.copy()
    ci_opt.add_extends(yaml, None)
    assert yaml == yaml_copy


def test_ci_workarounds():
    fake_root_spec = "x" * 544
    fake_spack_ref = "x" * 40

    common_variables = {"SPACK_IS_PR_PIPELINE": "False"}

    common_before_script = [
        'git clone "https://github.com/spack/spack"',
        " && ".join(("pushd ./spack", 'git checkout "{ref}"'.format(ref=fake_spack_ref), "popd")),
        '. "./spack/share/spack/setup-env.sh"',
    ]

    def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dependencies):
        variables = common_variables.copy()
        variables["SPACK_JOB_SPEC_PKG_NAME"] = name

        result = {
            "stage": stage,
            "tags": ["tag-0", "tag-1"],
            "artifacts": {
                "paths": ["jobs_scratch_dir", "cdash_report", name + ".spec.json", name],
                "when": "always",
            },
            "retry": {"max": 2, "when": ["always"]},
            "after_script": ['rm -rf "./spack"'],
            "script": ["spack ci rebuild"],
            "image": {"name": "spack/centos7", "entrypoint": [""]},
        }

        if optimize:
            result["extends"] = [".c0", ".c1"]
        else:
            variables["SPACK_ROOT_SPEC"] = fake_root_spec
            result["before_script"] = common_before_script

        result["variables"] = variables

        if use_dependencies:
            result["dependencies"] = list(deps) if use_artifact_buildcache else []
        else:
            result["needs"] = [{"job": dep, "artifacts": use_artifact_buildcache} for dep in deps]

        return {name: result}

    def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
        result = {
            "stage": "stage-rebuild-index",
            "script": "spack buildcache update-index s3://mirror",
            "tags": ["tag-0", "tag-1"],
            "image": {"name": "spack/centos7", "entrypoint": [""]},
            "after_script": ['rm -rf "./spack"'],
        }

        if optimize:
            result["extends"] = ".c0"
        else:
            result["before_script"] = common_before_script

        return {"rebuild-index": result}

    def make_factored_jobs(optimize):
        return (
            {
                ".c0": {"before_script": common_before_script},
                ".c1": {"variables": {"SPACK_ROOT_SPEC": fake_root_spec}},
            }
            if optimize
            else {}
        )

    def make_stage_list(num_build_stages):
        return {
            "stages": (
                ["-".join(("stage", str(i))) for i in range(num_build_stages)]
                + ["stage-rebuild-index"]
            )
        }

    def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
        result = {}

        result.update(
            make_build_job(
                "pkg-a", [], "stage-0", use_artifact_buildcache, optimize, use_dependencies
            )
        )

        result.update(
            make_build_job(
                "pkg-b", ["pkg-a"], "stage-1", use_artifact_buildcache, optimize, use_dependencies
            )
        )

        result.update(
            make_build_job(
                "pkg-c",
                ["pkg-a", "pkg-b"],
                "stage-2",
                use_artifact_buildcache,
                optimize,
                use_dependencies,
            )
        )

        result.update(make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies))

        result.update(make_factored_jobs(optimize))

        result.update(make_stage_list(3))

        return result

    # test every combination of:
    #   use artifact buildcache: true or false
    #   run optimization pass: true or false
    #   convert needs to dependencies: true or false
    for use_ab in (False, True):
        original = make_yaml_obj(
            use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
        )

        for opt, deps in itertools.product(*(((False, True),) * 2)):
            # neither optimizing nor converting needs->dependencies
            if not (opt or deps):
                # therefore, nothing to test
                continue

            predicted = make_yaml_obj(
                use_artifact_buildcache=use_ab, optimize=opt, use_dependencies=deps
            )

            actual = original.copy()
            if opt:
                actual = ci_opt.optimizer(actual)
            if deps:
                actual = cinw.needs_to_dependencies(actual)

            predicted = syaml.dump_config(ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
            actual = syaml.dump_config(ci_opt.sort_yaml_obj(actual), default_flow_style=True)

            assert predicted == actual


def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
    """Test that given an active environment and list of touched pkgs,
    we get the right list of possibly-changed env specs"""
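The removed test above exercised every Boolean combination via itertools.product. As a standalone illustration of that idiom (hypothetical option names), product(iterable, repeat=2) is the cleaner spelling of product(*(((False, True),) * 2)):

    import itertools

    # Two independent Boolean options -> four combinations; skip the no-op case,
    # mirroring the 'if not (opt or deps): continue' guard in the test above.
    for optimize, use_deps in itertools.product((False, True), repeat=2):
        if not (optimize or use_deps):
            continue
        print(f"testing optimize={optimize}, use_dependencies={use_deps}")
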
@@ -106,24 +106,24 @@ def test_specs_staging(config, tmpdir):

    """
    builder = repo.MockRepositoryBuilder(tmpdir)
    builder.add_package("pkg-g")
    builder.add_package("pkg-f")
    builder.add_package("pkg-e")
    builder.add_package("pkg-d", dependencies=[("pkg-f", None, None), ("pkg-g", None, None)])
    builder.add_package("pkg-c")
    builder.add_package("pkg-b", dependencies=[("pkg-d", None, None), ("pkg-e", None, None)])
    builder.add_package("pkg-a", dependencies=[("pkg-b", None, None), ("pkg-c", None, None)])
    builder.add_package("g")
    builder.add_package("f")
    builder.add_package("e")
    builder.add_package("d", dependencies=[("f", None, None), ("g", None, None)])
    builder.add_package("c")
    builder.add_package("b", dependencies=[("d", None, None), ("e", None, None)])
    builder.add_package("a", dependencies=[("b", None, None), ("c", None, None)])

    with repo.use_repositories(builder.root):
        spec_a = Spec("pkg-a").concretized()
        spec_a = Spec("a").concretized()

        spec_a_label = ci._spec_ci_label(spec_a)
        spec_b_label = ci._spec_ci_label(spec_a["pkg-b"])
        spec_c_label = ci._spec_ci_label(spec_a["pkg-c"])
        spec_d_label = ci._spec_ci_label(spec_a["pkg-d"])
        spec_e_label = ci._spec_ci_label(spec_a["pkg-e"])
        spec_f_label = ci._spec_ci_label(spec_a["pkg-f"])
        spec_g_label = ci._spec_ci_label(spec_a["pkg-g"])
        spec_b_label = ci._spec_ci_label(spec_a["b"])
        spec_c_label = ci._spec_ci_label(spec_a["c"])
        spec_d_label = ci._spec_ci_label(spec_a["d"])
        spec_e_label = ci._spec_ci_label(spec_a["e"])
        spec_f_label = ci._spec_ci_label(spec_a["f"])
        spec_g_label = ci._spec_ci_label(spec_a["g"])

        spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
@@ -1290,7 +1290,7 @@ def test_ci_generate_override_runner_attrs(
spack:
  specs:
    - flatten-deps
    - pkg-a
    - a
  mirrors:
    some-mirror: https://my.fake.mirror
  ci:

@@ -1307,12 +1307,12 @@ def test_ci_generate_override_runner_attrs(
        - match:
            - dependency-install
        - match:
            - pkg-a
            - a
          build-job:
            tags:
              - specific-a-2
        - match:
            - pkg-a
            - a
          build-job-remove:
            tags:
              - toplevel2

@@ -1372,8 +1372,8 @@ def test_ci_generate_override_runner_attrs(
            assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0"

        for ci_key in yaml_contents.keys():
            if ci_key.startswith("pkg-a"):
                # Make sure pkg-a's attributes override variables, and all the
            if ci_key.startswith("a"):
                # Make sure a's attributes override variables, and all the
                # scripts.  Also, make sure the 'toplevel' tag doesn't
                # appear twice, but that a's specific extra tag does appear
                the_elt = yaml_contents[ci_key]
@@ -1432,55 +1432,6 @@ def test_ci_generate_override_runner_attrs(
            assert the_elt["after_script"][0] == "post step one"


def test_ci_generate_with_workarounds(
    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
):
    """Make sure the post-processing cli workarounds do what they should"""
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(
            """\
spack:
  specs:
    - callpath%gcc@=9.5
  mirrors:
    some-mirror: https://my.fake.mirror
  ci:
    pipeline-gen:
      - submapping:
          - match: ['%gcc@9.5']
            build-job:
              tags:
                - donotcare
              image: donotcare
    enable-artifacts-buildcache: true
"""
        )

    with tmpdir.as_cwd():
        env_cmd("create", "test", "./spack.yaml")
        outputfile = str(tmpdir.join(".gitlab-ci.yml"))

        with ev.read("test"):
            ci_cmd("generate", "--output-file", outputfile, "--dependencies")

        with open(outputfile) as f:
            contents = f.read()
            yaml_contents = syaml.load(contents)

            found_one = False
            non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]

            for ci_key in yaml_contents.keys():
                if ci_key not in non_rebuild_keys:
                    found_one = True
                    job_obj = yaml_contents[ci_key]
                    assert "needs" not in job_obj
                    assert "dependencies" in job_obj

            assert found_one is True


@pytest.mark.disable_clean_stage_check
def test_ci_rebuild_index(
    tmpdir,
@@ -1830,7 +1781,7 @@ def test_ci_generate_read_broken_specs_url(
    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
):
    """Verify that `broken-specs-url` works as intended"""
    spec_a = Spec("pkg-a")
    spec_a = Spec("a")
    spec_a.concretize()
    a_dag_hash = spec_a.dag_hash()

@@ -1856,7 +1807,7 @@ def test_ci_generate_read_broken_specs_url(
spack:
  specs:
    - flatten-deps
    - pkg-a
    - a
  mirrors:
    some-mirror: https://my.fake.mirror
  ci:

@@ -1864,9 +1815,9 @@ def test_ci_generate_read_broken_specs_url(
    pipeline-gen:
      - submapping:
          - match:
              - pkg-a
              - a
              - flatten-deps
              - pkg-b
              - b
              - dependency-install
            build-job:
              tags:
@@ -11,6 +11,7 @@

import spack.caches
import spack.cmd.clean
import spack.environment as ev
import spack.main
import spack.package_base
import spack.stage

@@ -68,6 +69,20 @@ def test_function_calls(command_line, effects, mock_calls_for_clean):
    assert mock_calls_for_clean[name] == (1 if name in effects else 0)


def test_env_aware_clean(mock_stage, install_mockery, mutable_mock_env_path, monkeypatch):
    e = ev.create("test", with_view=False)
    e.add("mpileaks")
    e.concretize()

    def fail(*args, **kwargs):
        raise Exception("This should not have been called")

    monkeypatch.setattr(spack.spec.Spec, "concretize", fail)

    with e:
        clean("mpileaks")


def test_remove_python_cache(tmpdir, monkeypatch):
    cache_files = ["file1.pyo", "file2.pyc"]
    source_file = "file1.py"
@@ -81,14 +81,14 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
    """
    # Initial sanity check: we are planning on choosing a non-default
    # value, so make sure that is in fact not the default.
    check_defaults = spack.cmd.parse_specs(["pkg-a"], concretize=True)[0]
    check_defaults = spack.cmd.parse_specs(["a"], concretize=True)[0]
    assert not check_defaults.satisfies("foobar=baz")

    e = ev.create("test")
    e.add("pkg-a foobar=baz")
    e.add("a foobar=baz")
    e.concretize()
    with e:
        env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
        env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
        assert env_spec.satisfies("foobar=baz")
        assert env_spec.concrete

@@ -96,12 +96,12 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
@pytest.mark.usefixtures("config")
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
    e = ev.create("test")
    e.add("pkg-a foobar=baz")
    e.add("pkg-a foobar=fee")
    e.add("a foobar=baz")
    e.add("a foobar=fee")
    e.concretize()
    with e:
        with pytest.raises(ev.SpackEnvironmentError) as exc_info:
            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])

    assert "matches multiple specs" in exc_info.value.message

@@ -109,16 +109,16 @@ def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
@pytest.mark.usefixtures("config")
def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
    e = ev.create("test")
    e.add("pkg-b@0.9")
    e.add("pkg-a foobar=bar")  # Depends on b, should choose b@1.0
    e.add("b@0.9")
    e.add("a foobar=bar")  # Depends on b, should choose b@1.0
    e.concretize()
    with e:
        # This query matches the root b and b as a dependency of a. In that
        # case the root instance should be preferred.
        env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b"])[0])
        env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b"])[0])
        assert env_spec1.satisfies("@0.9")

        env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b@1.0"])[0])
        env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b@1.0"])[0])
        assert env_spec2
@@ -51,8 +51,8 @@ def test_concretize_root_test_dependencies_are_concretized(unify, mutable_mock_env_path):

    with ev.read("test") as e:
        e.unify = unify
        add("pkg-a")
        add("pkg-b")
        add("a")
        add("b")
        concretize("--test", "root")
        assert e.matching_spec("test-dependency")
@@ -15,26 +15,26 @@
def test_env(mutable_mock_env_path, config, mock_packages):
    ev.create("test")
    with ev.read("test") as e:
        e.add("pkg-a@2.0 foobar=bar ^pkg-b@1.0")
        e.add("pkg-a@1.0 foobar=bar ^pkg-b@0.9")
        e.add("a@2.0 foobar=bar ^b@1.0")
        e.add("a@1.0 foobar=bar ^b@0.9")
        e.concretize()
        e.write()


def test_deconcretize_dep(test_env):
    with ev.read("test") as e:
        deconcretize("-y", "pkg-b@1.0")
        deconcretize("-y", "b@1.0")
        specs = [s for s, _ in e.concretized_specs()]

    assert len(specs) == 1
    assert specs[0].satisfies("pkg-a@1.0")
    assert specs[0].satisfies("a@1.0")


def test_deconcretize_all_dep(test_env):
    with ev.read("test") as e:
        with pytest.raises(SpackCommandError):
            deconcretize("-y", "pkg-b")
        deconcretize("-y", "--all", "pkg-b")
            deconcretize("-y", "b")
        deconcretize("-y", "--all", "b")
        specs = [s for s, _ in e.concretized_specs()]

    assert len(specs) == 0

@@ -42,27 +42,27 @@ def test_deconcretize_all_dep(test_env):

def test_deconcretize_root(test_env):
    with ev.read("test") as e:
        output = deconcretize("-y", "--root", "pkg-b@1.0")
        output = deconcretize("-y", "--root", "b@1.0")
        assert "No matching specs to deconcretize" in output
        assert len(e.concretized_order) == 2

        deconcretize("-y", "--root", "pkg-a@2.0")
        deconcretize("-y", "--root", "a@2.0")
        specs = [s for s, _ in e.concretized_specs()]

    assert len(specs) == 1
    assert specs[0].satisfies("pkg-a@1.0")
    assert specs[0].satisfies("a@1.0")


def test_deconcretize_all_root(test_env):
    with ev.read("test") as e:
        with pytest.raises(SpackCommandError):
            deconcretize("-y", "--root", "pkg-a")
            deconcretize("-y", "--root", "a")

        output = deconcretize("-y", "--root", "--all", "pkg-b")
        output = deconcretize("-y", "--root", "--all", "b")
        assert "No matching specs to deconcretize" in output
        assert len(e.concretized_order) == 2

        deconcretize("-y", "--root", "--all", "pkg-a")
        deconcretize("-y", "--root", "--all", "a")
        specs = [s for s, _ in e.concretized_specs()]

    assert len(specs) == 0
@@ -125,18 +125,8 @@ def print_spack_cc(*args):
    print(os.environ.get("CC", ""))


# `module unload cray-libsci` in test environment causes failure
# It does not fail for actual installs
# build_environment.py imports module directly, so we monkeypatch it there
# rather than in module_cmd
def mock_module_noop(*args):
    pass


def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
    monkeypatch.setattr(os, "execvp", print_spack_cc)
    monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)

    with tmpdir.as_cwd():
        output = dev_build("-b", "edit", "--drop-in", "sh", "dev-build-test-install@0.0.0")
        assert "lib/spack/env" in output
lib/spack/spack/test/cmd/edit.py (new file, 46 lines)
@@ -0,0 +1,46 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.paths
import spack.repo
import spack.util.editor
from spack.build_systems import autotools, cmake
from spack.main import SpackCommand

edit = SpackCommand("edit")


def test_edit_packages(monkeypatch, mock_packages: spack.repo.RepoPath):
    """Test spack edit a b"""
    path_a = mock_packages.filename_for_package_name("a")
    path_b = mock_packages.filename_for_package_name("b")
    called = False

    def editor(*args: str, **kwargs):
        nonlocal called
        called = True
        assert args[0] == path_a
        assert args[1] == path_b

    monkeypatch.setattr(spack.util.editor, "editor", editor)
    edit("a", "b")
    assert called


def test_edit_files(monkeypatch):
    """Test spack edit --build-system autotools cmake"""
    called = False

    def editor(*args: str, **kwargs):
        nonlocal called
        called = True
        assert os.path.samefile(args[0], autotools.__file__)
        assert os.path.samefile(args[1], cmake.__file__)

    monkeypatch.setattr(spack.util.editor, "editor", editor)
    edit("--build-system", "autotools", "cmake")
    assert called
@@ -28,9 +28,7 @@
import spack.package_base
import spack.paths
import spack.repo
import spack.store
import spack.util.spack_json as sjson
import spack.util.spack_yaml
from spack.cmd.env import _env_create
from spack.main import SpackCommand, SpackCommandError
from spack.spec import Spec

@@ -503,7 +501,7 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, capsys):
"""\
spack:
  specs:
    - pkg-a
    - a
    - depb
"""
)

@@ -522,8 +520,8 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, capsys):
    depb = spack.store.STORE.db.query_one("depb", installed=True)
    assert depb, "Expected depb to be installed"

    a = spack.store.STORE.db.query_one("pkg-a", installed=True)
    assert a, "Expected pkg-a to be installed"
    a = spack.store.STORE.db.query_one("a", installed=True)
    assert a, "Expected a to be installed"


def test_remove_after_concretize():

@@ -827,7 +825,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
"""\
spack:
  specs:
    - pkg-a
    - a
  view: true
"""
)

@@ -835,9 +833,9 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
    external_config = io.StringIO(
        """\
packages:
  pkg-a:
  a:
    externals:
    - spec: pkg-a@2.0
    - spec: a@2.0
      prefix: {a_prefix}
    buildable: false
""".format(

@@ -1739,17 +1737,6 @@ def test_env_include_concrete_env_yaml(env_name):
    assert test.path in combined_yaml["include_concrete"]


@pytest.mark.regression("45766")
@pytest.mark.parametrize("format", ["v1", "v2", "v3"])
def test_env_include_concrete_old_env(format, tmpdir):
    lockfile = os.path.join(spack.paths.test_path, "data", "legacy_env", f"{format}.lock")
    # create an env from old .lock file -- this does not update the format
    env("create", "old-env", lockfile)
    env("create", "--include-concrete", "old-env", "test")

    assert ev.read("old-env").all_specs() == ev.read("test").all_specs()


def test_env_bad_include_concrete_env():
    with pytest.raises(ev.SpackEnvironmentError):
        env("create", "--include-concrete", "nonexistant_env", "combined_env")