# Compare commits

673 commits — comparing `bugfix/ext` ... `no-circula`
**`.github/workflows/audit.yaml`** (4 changes, vendored)

```diff
@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
```
**`.github/workflows/bootstrap.yml`** (24 changes, vendored)

```diff
@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
      run: |
        brew install tree
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap clingo
      run: |
        set -ex
@@ -204,7 +204,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
    - name: Bootstrap clingo
      run: |
        set -ex
-       for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+       for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
          not_found=1
          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
          echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
        bzip2 curl file g++ gcc patchelf gfortran git gzip \
        make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
        make patch unzip xz-utils python3 python3-dev tree \
        gawk
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-     uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+     uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
```
**`.github/workflows/build-containers.yml`** (2 changes, vendored)

```diff
@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
 
       - name: Set Container Tag Normal (Nightly)
         run: |
```
**`.github/workflows/ci.yaml`** (10 changes, vendored)

```diff
@@ -20,12 +20,6 @@ jobs:
     uses: ./.github/workflows/valid-style.yml
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-  audit-ancient-python:
-    uses: ./.github/workflows/audit.yaml
-    needs: [ changes ]
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-      python_version: 2.7
   all-prechecks:
     needs: [ prechecks ]
     runs-on: ubuntu-latest
@@ -41,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -85,7 +79,7 @@ jobs:
     needs: [ prechecks ]
     uses: ./.github/workflows/windows_python.yml
   all:
-    needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
```
**`.github/workflows/setup_git.ps1`** (8 changes, vendored)

```diff
@@ -1,15 +1,9 @@
-# (c) 2021 Lawrence Livermore National Laboratory
-
-Set-Location spack
+# (c) 2022 Lawrence Livermore National Laboratory
 
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true
 
-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always
-
 if ($(git branch --show-current) -ne "develop")
 {
     git branch develop origin/develop
```
**`.github/workflows/setup_git.sh`** (4 changes, vendored)

```diff
@@ -2,10 +2,6 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 
-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always
-
 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
     git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
```
**`.github/workflows/unit_tests.yaml`** (64 changes, vendored)

```diff
@@ -11,39 +11,46 @@ concurrency:
 jobs:
   # Run unit tests with different configurations on linux
   ubuntu:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
+        os: [ubuntu-latest]
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
         concretizer: ['clingo']
+        on_develop:
+        - ${{ github.ref == 'refs/heads/develop' }}
         include:
-        - python-version: 2.7
-          concretizer: original
-        - python-version: '3.11'
-          concretizer: original
+        - python-version: '3.11'
+          os: ubuntu-latest
+          concretizer: original
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        - python-version: '3.6'
+          os: ubuntu-20.04
+          concretizer: clingo
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        exclude:
+        - python-version: '3.7'
+          os: ubuntu-latest
+          concretizer: 'clingo'
+          on_develop: false
+        - python-version: '3.8'
+          os: ubuntu-latest
+          concretizer: 'clingo'
+          on_develop: false
+        - python-version: '3.9'
+          os: ubuntu-latest
+          concretizer: 'clingo'
+          on_develop: false
+        - python-version: '3.10'
+          os: ubuntu-latest
+          concretizer: 'clingo'
+          on_develop: false
 
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -52,24 +59,11 @@ jobs:
         # Needed for unit tests
         sudo apt-get -y install \
             coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-            patchelf cmake bison libbison-dev kcov
+            cmake bison libbison-dev kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
-        # Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
-        # to bugs on an unmaintained version of the package when used with xdist.
-        if [[ ${{ matrix.python-version }} != "2.7" ]]; then
-          pip install --upgrade pytest-cov
-        fi
-        # ensure style checks are not skipped in unit tests for python >= 3.6
-        # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
-        if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
-          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
-        fi
-    - name: Pin pathlib for Python 2.7
-      if: ${{ matrix.python-version == 2.7 }}
-      run: |
-        pip install -U pathlib2==2.3.6 toml
+        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
+        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
@@ -82,6 +76,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         spack bootstrap disable spack-install
+        spack bootstrap now
         spack -v solve zlib
     - name: Run unit tests
       env:
@@ -89,7 +84,7 @@ jobs:
         SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
-        UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
+        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
     - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -99,10 +94,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -138,7 +133,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -150,25 +145,22 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
-       spack -d solve zlib
+       spack -d bootstrap now --dev
        spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for unit tests
-        sudo apt-get -y install \
-            coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-            patchelf kcov
+        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
@@ -193,10 +185,10 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
```
**`.github/workflows/valid-style.yml`** (14 changes, vendored)

```diff
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -28,23 +28,23 @@ jobs:
         pip install --upgrade pip
         pip install --upgrade vermin
     - name: vermin (Spack's Core)
-      run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
-      run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
-        python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
+        python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
```
**`.github/workflows/windows_python.yml`** (175 changes, vendored)

```diff
@@ -10,15 +10,15 @@ concurrency:
 defaults:
   run:
     shell:
-      powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
 jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -26,13 +26,11 @@ jobs:
         python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
     - name: Create local develop
       run: |
-        .\spack\.github\workflows\setup_git.ps1
+        ./.github/workflows/setup_git.ps1
     - name: Unit Test
       run: |
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        cd spack
-        dir
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
     - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -41,10 +39,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -52,12 +50,11 @@ jobs:
         python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
     - name: Create local develop
       run: |
-        .\spack\.github\workflows\setup_git.ps1
+        ./.github/workflows/setup_git.ps1
     - name: Command Unit Test
       run: |
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        cd spack
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
     - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -66,10 +63,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -78,81 +75,81 @@ jobs:
     - name: Build Test
       run: |
         spack compiler find
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
         spack external find cmake
         spack external find ninja
         spack -d install abseil-cpp
-  make-installer:
-    runs-on: windows-latest
-    steps:
-    - name: Disable Windows Symlinks
-      run: |
-        git config --global core.symlinks false
-      shell:
-        powershell
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools
-    - name: Add Light and Candle to Path
-      run: |
-        $env:WIX >> $GITHUB_PATH
-    - name: Run Installer
-      run: |
-        .\spack\share\spack\qa\setup_spack.ps1
-        spack make-installer -s spack -g SILENT pkg
-        echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-      env:
-        ProgressPreference: SilentlyContinue
-    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-      with:
-        name: Windows Spack Installer Bundle
-        path: ${{ env.installer_root }}\pkg\Spack.exe
-    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-      with:
-        name: Windows Spack Installer
-        path: ${{ env.installer_root}}\pkg\Spack.msi
-  execute-installer:
-    needs: make-installer
-    runs-on: windows-latest
-    defaults:
-      run:
-        shell: pwsh
-    steps:
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools
-    - name: Setup installer directory
-      run: |
-        mkdir -p spack_installer
-        echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-    - uses: actions/download-artifact@v3
-      with:
-        name: Windows Spack Installer Bundle
-        path: ${{ env.spack_installer }}
-    - name: Execute Bundled Installer
-      run: |
-        $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-        $handle = $proc.Handle # cache proc.Handle
-        $proc.WaitForExit();
-        $LASTEXITCODE
-      env:
-        ProgressPreference: SilentlyContinue
-    - uses: actions/download-artifact@v3
-      with:
-        name: Windows Spack Installer
-        path: ${{ env.spack_installer }}
-    - name: Execute MSI
-      run: |
-        $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-        $handle = $proc.Handle # cache proc.Handle
-        $proc.WaitForExit();
-        $LASTEXITCODE
+  # TODO: johnwparent - reduce the size of the installer operations
+  # make-installer:
+  #   runs-on: windows-latest
+  #   steps:
+  #   - name: Disable Windows Symlinks
+  #     run: |
+  #       git config --global core.symlinks false
+  #     shell:
+  #       powershell
+  #   - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+  #     with:
+  #       fetch-depth: 0
+  #   - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Add Light and Candle to Path
+  #     run: |
+  #       $env:WIX >> $GITHUB_PATH
+  #   - name: Run Installer
+  #     run: |
+  #       ./share/spack/qa/setup_spack_installer.ps1
+  #       spack make-installer -s . -g SILENT pkg
+  #       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.installer_root }}\pkg\Spack.exe
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.installer_root}}\pkg\Spack.msi
+  # execute-installer:
+  #   needs: make-installer
+  #   runs-on: windows-latest
+  #   defaults:
+  #     run:
+  #       shell: pwsh
+  #   steps:
+  #   - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Setup installer directory
+  #     run: |
+  #       mkdir -p spack_installer
+  #       echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute Bundled Installer
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute MSI
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE
```
**`CHANGELOG.md`** (274 changes) — `@@ -1,16 +1,284 @@`

# v0.19.0 (2022-11-11)

`v0.19.0` is a major feature release.

## Major features in this release

1. **Package requirements**

   Spack's traditional [package preferences](https://spack.readthedocs.io/en/latest/build_settings.html#package-preferences) are soft, but we've added hard requirements to `packages.yaml` and `spack.yaml` (#32528, #32369). Package requirements use the same syntax as specs:

   ```yaml
   packages:
     libfabric:
       require: "@1.13.2"
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
   ```

   More details in [the docs](https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).

2. **Environment UI Improvements**

   * Fewer surprising modifications to `spack.yaml` (#33711):

     * `spack install` in an environment will no longer add to the `specs:` list; you'll need to either use `spack add <spec>` or `spack install --add <spec>`.

     * Similarly, `spack uninstall` will not remove from your environment's `specs:` list; you'll need to use `spack remove` or `spack uninstall --remove`.

     This makes an environment easier to manage, as there is a clear separation between the stack to be installed (`spack.yaml`/`spack.lock`) and which parts of it should be installed (`spack install` / `spack uninstall`); see the sketch at the end of this item.

   * `concretizer:unify:true` is now the default mode for new environments (#31787)

     We see more users creating `unify:true` environments now. Users who need `unify:false` can add it to their environment to get the old behavior, which concretizes every spec in the environment independently.
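   For instance, a minimal sketch of the new workflow, using `zlib` as a stand-in spec (any spec behaves the same way):

   ```console
   # add a spec to the environment's specs: list, then install the environment
   $ spack add zlib
   $ spack install

   # or do both in one step
   $ spack install --add zlib

   # removal is symmetric
   $ spack uninstall --remove zlib

   # opt back into independent concretization of each root spec
   $ spack config add concretizer:unify:false
   ```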
3. **Include environment configuration from URLs** (#29026, [docs](https://spack.readthedocs.io/en/latest/environments.html#included-configurations))

   You can now include configuration in your environment directly from a URL:

   ```yaml
   spack:
     include:
     - https://github.com/path/to/raw/config/compilers.yaml
   ```

4. **Multiple Build Systems**

   An increasing number of packages in the ecosystem need the ability to support multiple build systems (#30738, [docs](https://spack.readthedocs.io/en/latest/packaging_guide.html#multiple-build-systems)), either across versions, across platforms, or within the same version of the software. This has been hard to support through multiple inheritance, as methods from different build system superclasses would conflict. `package.py` files can now define separate builder classes with installation logic for different build systems, e.g.:

   ```python
   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass
   ```
5. **Compiler and variant propagation**

   Currently, compiler flags and variants are inconsistent: compiler flags set for a package are inherited by its dependencies, while variants are not. We should have these be consistent by allowing for inheritance to be enabled or disabled for both variants and compiler flags.

   Example syntax:
   - `package ++variant`: enabled variant that will be propagated to dependencies
   - `package +variant`: enabled variant that will NOT be propagated to dependencies
   - `package ~~variant`: disabled variant that will be propagated to dependencies
   - `package ~variant`: disabled variant that will NOT be propagated to dependencies
   - `package cflags==-g`: `cflags` will be propagated to dependencies
   - `package cflags=-g`: `cflags` will NOT be propagated to dependencies

   Syntax for non-boolean variants is similar to compiler flags. More in the docs for [variants](https://spack.readthedocs.io/en/latest/basic_usage.html#variants) and [compiler flags](https://spack.readthedocs.io/en/latest/basic_usage.html#compiler-flags).
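   As a quick illustration of the syntax above (the choice of `hdf5` here is arbitrary):

   ```console
   # +shared is enabled on hdf5 and propagated to its dependencies
   $ spack spec hdf5++shared

   # -g is propagated to dependencies' cflags; with a single =, it applies to hdf5 only
   $ spack spec hdf5 cflags==-g
   $ spack spec hdf5 cflags=-g
   ```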
6. **Enhancements to git version specifiers**

   * `v0.18.0` added the ability to use git commits as versions. You can now use the `git.` prefix to specify git tags or branches as versions. All of these are valid git versions in `v0.19` (#31200):

     ```console
     foo@abcdef1234abcdef1234abcdef1234abcdef1234      # raw commit
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234  # commit with git prefix
     foo@git.develop                                   # the develop branch
     foo@git.0.19                                      # use the 0.19 tag
     ```

   * `v0.19` also gives you more control over how Spack interprets git versions, in case Spack cannot detect the version from the git repository. You can suffix a git version with `=<version>` to force Spack to concretize it as a particular version (#30998, #31914, #32257):

     ```console
     # use mybranch, but treat it as version 3.2 for version comparison
     foo@git.mybranch=3.2

     # use the given commit, but treat it as develop for version comparison
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop
     ```

     More in [the docs](https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

7. **Changes to Cray EX Support**

   Cray machines have historically had their own "platform" within Spack, because we needed to go through the module system to leverage compilers and MPI installations on these machines. The Cray EX programming environment now provides standalone `craycc` executables and proper `mpicc` wrappers, so Spack can treat EX machines like Linux with extra packages (#29392).

   We expect this to greatly reduce bugs, as external packages and compilers can now be used by prefix instead of through modules. We will also no longer be subject to reproducibility issues when modules change from Cray PE release to release and from site to site. This also simplifies dealing with the underlying Linux OS on Cray systems, as Spack will properly model the machine's OS as either SuSE or RHEL.

8. **Improvements to tests and testing in CI**

   * `spack ci generate --tests` will generate a `.gitlab-ci.yml` file that not only does builds but also runs tests for built packages (#27877). Public GitHub pipelines now also run tests in CI.

   * `spack test run --explicit` will only run tests for packages that are explicitly installed, instead of all packages.
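   As a rough sketch of both commands (the `--output-file` destination is just an example):

   ```console
   # generate a pipeline that builds packages and also runs their tests
   $ spack ci generate --tests --output-file .gitlab-ci.yml

   # run stand-alone tests only for explicitly installed packages
   $ spack test run --explicit
   ```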
9. **Experimental binding link model**

   You can add a new option to `config.yaml` to make Spack embed absolute paths to needed shared libraries in ELF executables and shared libraries on Linux (#31948, [docs](https://spack.readthedocs.io/en/latest/config_yaml.html#shared-linking-bind)):

   ```yaml
   config:
     shared_linking:
       type: rpath
       bind: true
   ```

   This can improve launch time at scale for parallel applications, and it can make installations less susceptible to environment variables like `LD_LIBRARY_PATH`, especially when dealing with external libraries that use `RUNPATH`. You can think of this as a faster, even higher-precedence version of `RPATH`.
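   The same settings can presumably also be applied from the command line; the colon-separated paths below assume `spack config add` addresses nested keys the same way it does elsewhere in these notes:

   ```console
   $ spack config add config:shared_linking:type:rpath
   $ spack config add config:shared_linking:bind:true
   ```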
## Other new features of note

* `spack spec` prints dependencies more legibly. Dependencies in the output now appear at the *earliest* level of indentation possible (#33406)
* You can override `package.py` attributes like `url`, directly in `packages.yaml` (#33275, [docs](https://spack.readthedocs.io/en/latest/build_settings.html#assigning-package-attributes))
* There are a number of new architecture-related format strings you can use in Spack configuration files to specify paths (#29810, [docs](https://spack.readthedocs.io/en/latest/configuration.html#config-file-variables))
* Spack now supports bootstrapping Clingo on Windows (#33400)
* There is now support for an `RPATH`-like library model on Windows (#31930)

## Performance Improvements

* Major performance improvements for installation from binary caches (#27610, #33628, #33636, #33608, #33590, #33496)
* Test suite can now be parallelized using `xdist` (used in GitHub Actions) (#32361)
* Reduced lock contention for parallel builds in environments (#31643)

## New binary caches and stacks

* We now build nearly all of E4S with `oneapi` in our buildcache (#31781, #31804, #31803, #31840, #31991, #32117, #32107, #32239)
* Added 3 new machine learning-centric stacks to the binary cache: `x86_64_v3`, CUDA, ROCm (#31592, #33463)

## Removals and Deprecations

* Support for Python 3.5 is dropped (#31908). Only Python 2.7 and 3.6+ are officially supported.

* This is the last Spack release that will support Python 2 (#32615). Spack `v0.19` will emit a deprecation warning if you run it with Python 2, and Python 2 support will soon be removed from the `develop` branch.

* `LD_LIBRARY_PATH` is no longer set by default by `spack load` or module loads.

  Setting `LD_LIBRARY_PATH` in Spack environments/modules can cause binaries from outside of Spack to crash, and Spack's own builds use `RPATH` and do not need `LD_LIBRARY_PATH` set in order to run. If you still want the old behavior, you can run these commands to configure Spack to set `LD_LIBRARY_PATH`:

  ```console
  spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]
  spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH]
  ```

* The `spack:concretization:[together|separately]` option has been removed after being deprecated in `v0.18`. Use `concretizer:unify:[true|false]` instead.
* `config:module_roots` is no longer supported after being deprecated in `v0.18`. Use configuration in module sets instead (#28659, [docs](https://spack.readthedocs.io/en/latest/module_file_support.html)).
* `spack activate` and `spack deactivate` are no longer supported, having been deprecated in `v0.18`. Use an environment with a view instead of activating/deactivating ([docs](https://spack.readthedocs.io/en/latest/environments.html#configuration-in-spack-yaml)).
* The old YAML format for buildcaches is now deprecated (#33707). If you are using an old buildcache with YAML metadata you will need to regenerate it with JSON metadata.
* `spack bootstrap trust` and `spack bootstrap untrust` are deprecated in favor of `spack bootstrap enable` and `spack bootstrap disable` and will be removed in `v0.20` (#33600).
* The `graviton2` architecture has been renamed to `neoverse_n1`, and `graviton3` is now `neoverse_v1`. Buildcaches using the old architecture names will need to be rebuilt.
* The terms `blacklist` and `whitelist` have been replaced with `include` and `exclude` in all configuration files (#31569). You can use `spack config update` to automatically fix your configuration files (see the sketch after this list).
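  For example, a plausible migration session (the `modules` section name is just an illustration):

  ```console
  # rewrite deprecated keys (e.g. blacklist/whitelist) in a configuration section
  $ spack config update modules

  # replace the removed spack:concretization option
  $ spack config add concretizer:unify:true
  ```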
## Notable Bugfixes

* Permission setting on installation now handles effective uid properly (#19980)
* `buildable:true` for an MPI implementation now overrides `buildable:false` for `mpi` (#18269)
* Improved error messages when attempting to use an unconfigured compiler (#32084)
* Do not punish explicitly requested compiler mismatches in the solver (#30074)
* `spack stage`: add missing `--fresh` and `--reuse` (#31626)
* Fixes for adding build system executables like `cmake` to package scope (#31739)
* Bugfix for binary relocation with aliased strings produced by newer `binutils` (#32253)

## Spack community stats

* 6,751 total packages, 335 new since `v0.18.0`
    * 141 new Python packages
    * 89 new R packages
* 303 people contributed to this release
    * 287 committers to packages
    * 57 committers to core


# v0.18.1 (2022-07-19)

### Spack Bugfixes
* Fix several bugs related to bootstrapping (#30834, #31042, #31180)
* Fix a regression that was causing spec hashes to differ between Python 2 and Python 3 (#31092)
* Fixed compiler flags for oneAPI and DPC++ (#30856)
* Fixed several issues related to concretization (#31142, #31153, #31170, #31226)
* Improved support for Cray manifest file and `spack external find` (#31144, #31201, #31173, #31186)
* Assign a version to openSUSE Tumbleweed according to the GLIBC version in the system (#19895)
* Improved Dockerfile generation for `spack containerize` (#29741, #31321)
* Fixed a few bugs related to concurrent execution of commands (#31509, #31493, #31477)

### Package updates
* WarpX: add v22.06, fixed libs property (#30866, #31102)
```diff
@@ -10,8 +10,8 @@ For more on Spack's release structure, see
 | Version | Supported          |
 | ------- | ------------------ |
 | develop | :white_check_mark: |
-| 0.17.x  | :white_check_mark: |
-| 0.16.x  | :white_check_mark: |
+| 0.19.x  | :white_check_mark: |
+| 0.18.x  | :white_check_mark: |
 
 ## Reporting a Vulnerability
```
```diff
@@ -31,13 +31,11 @@ import os
 import os.path
 import sys
 
-min_python3 = (3, 5)
+min_python3 = (3, 6)
 
-if sys.version_info[:2] < (2, 7) or (
-    sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
-):
+if sys.version_info[:2] < min_python3:
     v_info = sys.version_info[:3]
-    msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
+    msg = "Spack requires Python %d.%d or higher " % min_python3
     msg += "You are running spack with Python %d.%d.%d." % v_info
     sys.exit(msg)
```
```diff
@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}"
+      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path
@@ -214,4 +214,8 @@ config:
 
   # Number of seconds a buildcache's index.json is cached locally before probing
   # for updates, within a single Spack invocation. Defaults to 10 minutes.
   binary_index_ttl: 600
+
+  flags:
+    # Whether to keep -Werror flags active in package builds.
+    keep_werror: 'none'
```
@@ -1,162 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======

The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.

-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

   $ tree ~/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── spack-analyzer-libabigail-libz.so.1.2.11.xml

This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.
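To make that layout concrete, the output directory is a join of the same components that make up the install prefix. A minimal sketch in plain Python (the helper name and values are illustrative, not Spack code):

.. code-block:: python

   import os

   def analyzer_output_dir(root, arch, compiler, spec_dir):
       """Mirror the install layout: <root>/<arch>/<compiler>/<name-version-hash>."""
       return os.path.join(root, arch, compiler, spec_dir)

   print(analyzer_output_dir(
       os.path.expanduser("~/.spack/analyzers"),  # default root
       "linux-ubuntu20.04-skylake",               # values from the tree above
       "gcc-9.3.0",
       "zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2",
   ))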
-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

   $ spack analyze list-analyzers
   install_files            : install file listing read from install_manifest.json
   environment_variables    : environment variables parsed from spack-build-env.txt
   config_args              : config args loaded from spack-configure-args.txt
   libabigail               : Application Binary Interface (ABI) features for objects

In the above, the first three are fairly simple - parsing metadata files from
a package install directory to save

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

   $ spack analyze run zlib
   ==> Error: zlib matches multiple packages.
     Matching packages:
       fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
       sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
     Use a more specific spec.

We can then specify the spec version that we want to analyze:

.. code-block:: console

   $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

   $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
   spack-analyzer-environment-variables.json
   spack-analyzer-install-files.json
   spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with ``--analyzer``. Here we run
spack analyze on libabigail (already installed) *using* libabigail:

.. code-block:: console

   $ spack analyze run --analyzer abigail libabigail

.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can
use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
as a server to upload the same run metadata to. You can
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export your spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the analyze command:

.. code-block:: console

   $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

   $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not), you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

   $ spack analyze --monitor wget
   ...
   ==> Sending result for wget bin/wget to monitor.

@@ -1114,21 +1114,21 @@ set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
or ``1.7``). When you supply such a specifier to ``spack install``,
it constrains the set of versions that Spack will install.

For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version, i.e. branches, tags,
and commits. Spack will stage and build based off the ``git``
reference provided. Acceptable syntaxes for this are:

.. code-block:: sh

   # branches and tags
   foo@git.develop  # use the develop branch
   foo@git.0.19     # use the 0.19 tag

   # commit hashes
   foo@abcdef1234abcdef1234abcdef1234abcdef1234      # 40 character hashes are automatically treated as git commits
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

Spack versions from git reference either have an associated version supplied by the user,
or infer a relationship to known versions from the structure of the git repository. If an
associated version is supplied by the user, Spack treats the git version as equivalent to that
@@ -1244,8 +1244,8 @@ For example, for the ``stackstart`` variant:

.. code-block:: sh

-   mpileaks stackstart=4   # variant will be propagated to dependencies
-   mpileaks stackstart==4  # only mpileaks will have this variant value
+   mpileaks stackstart==4  # variant will be propagated to dependencies
+   mpileaks stackstart=4   # only mpileaks will have this variant value

^^^^^^^^^^^^^^
Compiler Flags

@@ -1672,9 +1672,13 @@ own install prefix. However, certain packages are typically installed
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.

-Spack has support for this type of installation as well. In Spack,
-a package that can live inside the prefix of another package is called
-an *extension*. Suppose you have Python installed like so:
+In Spack, installation prefixes are immutable, so this type of installation
+is not directly supported. However, it is possible to create views that
+allow you to merge install prefixes of multiple packages into a single new prefix.
+Views are a convenient way to get a more traditional filesystem structure.
+Using *extensions*, you can ensure that Python packages always share the
+same prefix in the view as Python itself. Suppose you have
+Python installed like so:

.. code-block:: console

@@ -1712,8 +1716,6 @@ You can find extensions for your Python installation like this:
   py-ipython@2.3.1     py-pygments@2.0.1    py-setuptools@11.3.1
   py-matplotlib@1.4.2  py-pyparsing@2.0.3   py-six@1.9.0

-==> None activated.
-
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
@@ -1741,32 +1743,72 @@ directly when you run ``python``:
   ImportError: No module named numpy
   >>>

-^^^^^^^^^^^^^^^^
-Using Extensions
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Extensions in Environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-There are four ways to get ``numpy`` working in Python. The first is
-to use :ref:`shell-support`. You can simply ``load`` the extension,
-and it will be added to the ``PYTHONPATH`` in your current shell:
+The recommended way of working with extensions such as ``py-numpy``
+above is through :ref:`Environments <environments>`. For example,
+the following creates an environment in the current working directory
+with a filesystem view in the ``./view`` directory:

.. code-block:: console

-   $ spack load python
-   $ spack load py-numpy
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install

We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):

.. code-block:: console

   $ spack env activate .

which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that

.. code-block:: console

   $ python
   >>> import numpy

works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:

.. code-block:: console

   $ ./view/bin/python
   >>> import numpy

See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.

^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^

A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:

.. code-block:: console

   $ spack load py-numpy
   $ python
   >>> import numpy

Now ``import numpy`` will succeed for as long as you keep your current
session open.
The loaded packages can be checked using ``spack find --loaded``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Instead of using Spack's environment modification capabilities through
-the ``spack load`` command, you can load numpy through your
-environment modules (using ``environment-modules`` or ``lmod``). This
-will also add the extension to the ``PYTHONPATH`` in your current
-shell.
+Apart from ``spack env activate`` and ``spack load``, you can load numpy
+through your environment modules (using ``environment-modules`` or
+``lmod``). This will also add the extension to the ``PYTHONPATH`` in
+your current shell.

.. code-block:: console

@@ -1776,130 +1818,6 @@ If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions in a View
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Another way to use extensions is to create a view, which merges the
python installation along with the extensions into a single prefix.
See :ref:`configuring_environment_views` for a more in-depth description
of views.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions Globally
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

As an alternative to creating a merged prefix with Python and its extensions,
and prior to support for views, Spack has provided a means to install the
extension into the Spack installation prefix for the extendee. This has
typically been useful since extendable packages typically search their own
installation path for addons by default.

Global activations are performed with the ``spack activate`` command:

.. _cmd-spack-activate:

^^^^^^^^^^^^^^^^^^
``spack activate``
^^^^^^^^^^^^^^^^^^

.. code-block:: console

   $ spack activate py-numpy
   ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
   ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
``py-setuptools``, so it activated those packages first. Finally,
once all dependencies were activated in the ``python`` installation,
``py-numpy`` was activated as well.

If we run ``spack extensions`` again, we now see the three new
packages listed as activated:

.. code-block:: console

   $ spack extensions python
   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
   ==> 36 extensions:
   geos          py-ipython     py-pexpect    py-pyside            py-sip
   py-basemap    py-libxml2     py-pil        py-pytz              py-six
   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
   py-epydoc     py-mx          py-pylint     py-scipy
   py-gnuplot    py-nose        py-pyparsing  py-setuptools
   py-h5py       py-numpy       py-pyqt       py-shiboken

   ==> 12 installed:
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0

   ==> 3 currently activated:
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-nose@1.3.4  py-numpy@1.9.1  py-setuptools@11.3.1

Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.

Spack accomplishes this by symbolically linking the *entire* prefix of
the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.

The only limitation of global activation is that you can only have a *single*
version of an extension activated at a time. This is because multiple
versions of the same extension would conflict if symbolically linked
into the same prefix. Users who want a different version of a package
can still get it by using environment modules or views, but they will have to
explicitly load their preferred version.

^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack activate --force``
^^^^^^^^^^^^^^^^^^^^^^^^^^

If, for some reason, you want to activate a package *without* its
dependencies, you can use ``spack activate --force``:

.. code-block:: console

   $ spack activate --force py-numpy
   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

.. _cmd-spack-deactivate:

^^^^^^^^^^^^^^^^^^^^
``spack deactivate``
^^^^^^^^^^^^^^^^^^^^

We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:

* ``spack deactivate <extension>`` will deactivate a single
  extension. If another activated extension depends on this one,
  Spack will warn you and exit with an error.
* ``spack deactivate --force <extension>`` deactivates an extension
  regardless of packages that depend on it.
* ``spack deactivate --all <extension>`` deactivates an extension and
  all of its dependencies. Use ``--force`` to disregard dependents.
* ``spack deactivate --all <extendee>`` deactivates *all* activated
  extensions of a package. For example, to deactivate *all* python
  extensions, use:

.. code-block:: console

   $ spack deactivate --all python

-----------------------
Filesystem requirements
-----------------------

@@ -5,9 +5,9 @@

.. _cachedcmakepackage:

-------------------
-CachedCMakePackage
-------------------
+-----------
+CachedCMake
+-----------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is

@@ -5,9 +5,9 @@

.. _cudapackage:

------------
-CudaPackage
------------
+----
+Cuda
+----

Different from other packages, ``CudaPackage`` does not represent a build system.
Instead its goal is to simplify and unify usage of ``CUDA`` in other packages by providing a `mixin-class <https://en.wikipedia.org/wiki/Mixin>`_.

@@ -80,7 +80,7 @@ standard CUDA compiler flags.

**cuda_flags**

This built-in static method returns a list of command line flags
for the chosen ``cuda_arch`` value(s). The flags are intended to
be passed to the CUDA compiler driver (i.e., ``nvcc``).
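For intuition, per-architecture compiler flags of this kind are typically assembled along these lines (an illustrative Python sketch only, not the actual ``cuda_flags`` implementation; the sample arch values are made up):

.. code-block:: python

   def cuda_flags(arch_list):
       """Sketch: one gencode flag pair per requested compute capability."""
       return [
           flag
           for arch in arch_list
           for flag in ("-gencode", f"arch=compute_{arch},code=sm_{arch}")
       ]

   print(cuda_flags(["70", "80"]))
   # ['-gencode', 'arch=compute_70,code=sm_70', '-gencode', 'arch=compute_80,code=sm_80']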
@@ -6,9 +6,9 @@

.. _inteloneapipackage:

-====================
-IntelOneapiPackage
-====================
+===========
+IntelOneapi
+===========

.. contents::

@@ -36,7 +36,7 @@ For more information on a specific package, do::

Intel no longer releases new versions of Parallel Studio, which can be
used in Spack via the :ref:`intelpackage`. All of its components can
now be found in oneAPI.

Examples
========

@@ -5,9 +5,9 @@

.. _intelpackage:

-------------
-IntelPackage
-------------
+-----
+Intel
+-----

.. contents::

@@ -5,9 +5,9 @@

.. _pythonpackage:

--------------
-PythonPackage
--------------
+------
+Python
+------

Python packages and modules have their own special build system. This
documentation covers everything you'll need to know in order to write
@@ -724,10 +724,9 @@ extends vs. depends_on

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
-``extends`` and ``depends_on`` are very similar, but ``extends`` adds
-the ability to *activate* the package. Activation involves symlinking
-everything in the installation prefix of the package to the installation
-prefix of Python. This allows the user to import a Python module without
+``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
+that the extension and extendee share the same prefix in views.
+This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of
@@ -735,7 +734,7 @@ thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
-don't use ``extends``, as symlinking the package wouldn't be useful.
+don't use ``extends``.
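To make the rule of thumb concrete, a recipe might use both directives side by side. A minimal sketch (the package name and chosen dependencies are hypothetical; ``extends`` and ``depends_on`` are the real directives):

.. code-block:: python

   from spack.package import *


   class PyExample(PythonPackage):
       """Hypothetical package illustrating extends vs. depends_on."""

       homepage = "https://example.com"  # placeholder metadata
       url = "https://example.com/example-1.0.tar.gz"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       # Installs into python's site-packages and shares python's prefix in views:
       extends("python")

       # Ordinary dependencies; nothing is merged into their prefixes:
       depends_on("py-setuptools", type="build")
       depends_on("py-numpy", type=("build", "run"))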
^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack

@@ -5,9 +5,9 @@

.. _rocmpackage:

------------
-ROCmPackage
------------
+----
+ROCm
+----

The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
it provides standard variants, dependencies, and conflicts to facilitate building

@@ -25,7 +25,7 @@ This package provides the following variants:

* **rocm**

  This variant is used to enable/disable building with ``rocm``.
  The default is disabled (or ``False``).

* **amdgpu_target**

@@ -5,9 +5,9 @@

.. _rpackage:

---------
-RPackage
---------
+--
+R
+--

Like Python, R has its own built-in build system.

@@ -193,10 +193,10 @@ Build system dependencies

As an extension of the R ecosystem, your package will obviously depend
on R to build and run. Normally, we would use ``depends_on`` to express
-this, but for R packages, we use ``extends``. ``extends`` is similar to
-``depends_on``, but adds an additional feature: the ability to "activate"
-the package by symlinking it to the R installation directory. Since
-every R package needs this, the ``RPackage`` base class contains:
+this, but for R packages, we use ``extends``. This implies a special
+dependency on R, which is used to set environment variables such as
+``R_LIBS`` uniformly. Since every R package needs this, the ``RPackage``
+base class contains:

.. code-block:: python

@@ -5,15 +5,15 @@

.. _sourceforgepackage:

-------------------
-SourceforgePackage
-------------------
+-----------
+Sourceforge
+-----------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
``sourceforge_mirror_path``, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.

@@ -29,7 +29,7 @@ This package provides a method for populating mirror URLs.

It is decorated with ``property`` so its results are treated as
a package attribute.

Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the ``urls`` attribute during
fetching.

@@ -37,12 +37,6 @@
 os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))

-if sys.version_info[0] < 3:
-    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
-else:
-    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
-
 sys.path.append(os.path.abspath("_spack_root/lib/spack/"))

 # Add the Spack bin directory to the path so that we can use its output in docs.

@@ -80,6 +74,7 @@
     "--force",  # Overwrite existing files
     "--no-toc",  # Don't create a table of contents file
     "--output-dir=.",  # Directory to place all output
+    "--module-first",  # emit module docs before submodule docs
 ]
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])

@@ -160,8 +155,8 @@ def setup(sphinx):
 master_doc = "index"

 # General information about the project.
-project = u"Spack"
-copyright = u"2013-2021, Lawrence Livermore National Laboratory."
+project = "Spack"
+copyright = "2013-2021, Lawrence Livermore National Laboratory."

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the

@@ -206,12 +201,14 @@ def setup(sphinx):
    ("py:class", "_frozen_importlib_external.SourceFileLoader"),
    ("py:class", "clingo.Control"),
    ("py:class", "six.moves.urllib.parse.ParseResult"),
    ("py:class", "TextIO"),
    # Spack classes that are private and we don't want to expose
    ("py:class", "spack.provider_index._IndexBase"),
    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.VersionBase"),
    ("py:class", "spack.spec.DependencySpec"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.

@@ -350,7 +347,7 @@ class SpackStyle(DefaultStyle):
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
+    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
 ]

 # The name of an image file (relative to this directory) to place at the top of

@@ -378,7 +375,7 @@ class SpackStyle(DefaultStyle):

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
+man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)]

 # If true, show URL addresses after external links.
 # man_show_urls = False

@@ -393,8 +390,8 @@ class SpackStyle(DefaultStyle):
     (
         "index",
         "Spack",
-        u"Spack Documentation",
-        u"Todd Gamblin",
+        "Spack Documentation",
+        "Todd Gamblin",
         "Spack",
         "One line description of project.",
         "Miscellaneous",

@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
Spack-specific variables
^^^^^^^^^^^^^^^^^^^^^^^^

-Spack understands several special variables. These are:
+Spack understands over a dozen special variables. These are:

* ``$env``: name of the currently active :ref:`environment <environments>`
* ``$spack``: path to the prefix of this Spack installation

@@ -416,6 +416,8 @@ Spack understands several special variables. These are:
  ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``. The target family for the current host, as
  detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
+* ``$date``: the current date in the format YYYY-MM-DD

Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:

@@ -253,27 +253,6 @@ to update them.
 multiple runs of ``spack style`` just to re-compute line numbers and
 makes it much easier to fix errors directly off of the CI output.

-.. warning::
-
-   Flake8 and ``pep8-naming`` require a number of dependencies in order
-   to run. If you installed ``py-flake8`` and ``py-pep8-naming``, the
-   easiest way to ensure the right packages are on your ``PYTHONPATH`` is
-   to run::
-
-      spack activate py-flake8
-      spack activate pep8-naming
-
-   so that all of the dependencies are symlinked to a central
-   location. If you see an error message like:
-
-   .. code-block:: console
-
-      Traceback (most recent call last):
-        File: "/usr/bin/flake8", line 5, in <module>
-          from pkg_resources import load_entry_point
-      ImportError: No module named pkg_resources
-
-   that means Flake8 couldn't find setuptools in your ``PYTHONPATH``.

^^^^^^^^^^^^^^^^^^^
Documentation Tests

@@ -309,13 +288,9 @@ All of these can be installed with Spack, e.g.

.. code-block:: console

-   $ spack activate py-sphinx
-   $ spack activate py-sphinx-rtd-theme
-   $ spack activate py-sphinxcontrib-programoutput
+   $ spack load py-sphinx py-sphinx-rtd-theme py-sphinxcontrib-programoutput

-so that all of the dependencies are symlinked into that Python's
-tree. Alternatively, you could arrange for their library
-directories to be added to PYTHONPATH. If you see an error message
+so that all of the dependencies are added to PYTHONPATH. If you see an error message
like:

.. code-block:: console

@@ -175,14 +175,11 @@ Spec-related modules
^^^^^^^^^^^^^^^^^^^^

:mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
-  Also implements most of the logic for normalization and concretization
+  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
   of specs.

-:mod:`spack.parse`
-  Contains some base classes for implementing simple recursive descent
-  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
-  Used by :class:`~spack.spec.SpecParser`.
+:mod:`spack.parser`
+  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

:mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,

@@ -233,8 +233,8 @@ packages will be listed as roots of the Environment.

All of the Spack commands that act on the list of installed specs are
Environment-sensitive in this way, including ``install``,
-``uninstall``, ``activate``, ``deactivate``, ``find``, ``extensions``,
-and more. In the :ref:`environment-configuration` section we will discuss
+``uninstall``, ``find``, ``extensions``, and more. In the
+:ref:`environment-configuration` section we will discuss
Environment-sensitive commands further.

^^^^^^^^^^^^^^^^^^^^^
@@ -1070,19 +1070,23 @@ the include is conditional.
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The generated ``Makefile``\s contain install targets for each spec. Given the hash
-of a particular spec, you can use the ``.install/<hash>`` target to install the
-spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
+The generated ``Makefile``\s contain install targets for each spec, identified
+by ``<name>-<version>-<hash>``. This allows you to install only a subset of the
+packages in the environment. When packages are unique in the environment, it's
+enough to know the name and let tab-completion fill out the version and hash.
+
+The following phony targets are available: ``install/<spec>`` to install the
+spec with its dependencies, and ``install-deps/<spec>`` to *only* install
its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code:: console

-   $ spack env depfile -o Makefile --make-target-prefix my_env
+   $ spack env depfile -o Makefile

   # Install dependencies in parallel, only show a log on error.
-   $ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
+   $ make -j16 install-deps/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--show-log-on-error

   # Install the root spec with verbose output.
-   $ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
+   $ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose
@@ -21,8 +21,9 @@ be present on the machine where Spack is run:
   :header-rows: 1

These requirements can be easily installed on most modern Linux systems;
-on macOS, XCode is required. Spack is designed to run on HPC
-platforms like Cray. Not all packages should be expected
+on macOS, the Command Line Tools package is required, and a full XCode suite
+may be necessary for some packages such as Qt and apple-gl. Spack is designed
+to run on HPC platforms like Cray. Not all packages should be expected
to work on all platforms.

A build matrix showing which packages are working on which systems is shown below.

@@ -1704,9 +1705,11 @@ dependencies or incompatible build tools like autoconf. Here are several
packages known to work on Windows:

* abseil-cpp
* bzip2
* clingo
* cpuinfo
* cmake
* hdf5
* glm
* nasm
* netlib-lapack (requires Intel Fortran)

@@ -67,7 +67,6 @@ or refer to the full manual below.
    build_settings
    environments
    containers
-   monitoring
    mirrors
    module_file_support
    repositories

@@ -78,12 +77,6 @@ or refer to the full manual below.
    extensions
    pipelines

-.. toctree::
-   :maxdepth: 2
-   :caption: Research
-
-   analyze
-
 .. toctree::
    :maxdepth: 2
    :caption: Contributing

@@ -1,265 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _monitoring:

==========
Monitoring
==========

You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to store a database of your packages, builds, and associated metadata
for provenance, research, or some other kind of development. You should
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

-------------------
Analysis Monitoring
-------------------

To read about how to monitor an analysis (meaning you want to send analysis results
to a server) see :ref:`analyze_monitoring`.

---------------------
Monitoring An Install
---------------------

Since an install is typically when you build packages, we logically want
to tell spack to monitor during this step. Let's start with an example
where we want to monitor the install of hdf5. Unless you have disabled authentication
for the server, we first want to export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack install --monitor hdf5

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5

As a precaution, we cut out early in the spack client if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:

.. code-block:: console

   ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER

This extra check is to ensure that we don't start any builds,
and then discover that you forgot to export your token. However, if
your monitoring server has authentication disabled, you can tell this to
the client to skip this step:

.. code-block:: console

   $ spack install --monitor --monitor-disable-auth hdf5

If the service is not running, you'll cleanly exit early - the install will
not continue if you've asked it to monitor and there is no service.
For example, here is what you'll see if the monitoring service is not running:

.. code-block:: console

   [Errno 111] Connection refused

If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-keep-going hdf5

This could mean that if a request fails, you only have partial or no data
added to your monitoring database. This setting will not be applied to the
first request to check if the server is running, but to subsequent requests.
If you don't have a monitor server running and you want to build, simply
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
more tags to your build, you can do:

.. code-block:: console

   # Add one tag, "pizza"
   $ spack install --monitor --monitor-tags pizza hdf5

   # Add two tags, "pizza" and "pasta"
   $ spack install --monitor --monitor-tags pizza,pasta hdf5

----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile

The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit along with your build and identifying the name of the secret:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:

.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity

Again, generate the recipe:

.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity

Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of a manual command to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438

We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the file at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh

You can then build the container as you normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity

------------------
Monitoring Offline
------------------

In the case that you want to save monitor results to your filesystem
and then upload them later (perhaps you are in an environment where you don't
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor", in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:

.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor

You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
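For orientation, the saved results can be inspected before uploading. A minimal sketch in plain Python (it assumes, as an illustration, that the saved results are JSON files under the reports folder; this is not part of Spack):

.. code-block:: python

   import json
   import os

   def iter_saved_results(root):
       """Yield (path, parsed JSON) for result files saved under root."""
       for dirpath, _, filenames in os.walk(root):
           for name in filenames:
               if name.endswith(".json"):
                   path = os.path.join(dirpath, name)
                   with open(path) as f:
                       yield path, json.load(f)

   # Hypothetical usage: peek at results before uploading them to a server.
   for path, data in iter_saved_results(os.path.expanduser("~/.spack/reports/monitor")):
       print(path, sorted(data)[:3])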
@@ -2397,13 +2397,15 @@ this because uninstalling the dependency would break the package.

``build``, ``link``, and ``run`` dependencies all affect the hash of Spack
packages (along with ``sha256`` sums of patches and archives used to build the
-package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of
+package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
the ``package.py`` recipes). ``test`` dependencies do not affect the package
hash, as they are only used to construct a test environment *after* building and
installing a given package installation. Older versions of Spack did not include
-build dependencies in the hash, but this has been
-[fixed](https://github.com/spack/spack/pull/28504) as of [Spack
-``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0)
+build dependencies in the hash, but this has been
+`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
+
+.. |Spack v0.18| replace:: Spack ``v0.18``
+.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0

If the dependency type is not specified, Spack uses a default of
``('build', 'link')``. This is the common case for compiler languages.
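For reference, dependency types are declared with the ``type`` argument to ``depends_on``. A small sketch (the package and its dependencies are arbitrary examples, not taken from this diff):

.. code-block:: python

   from spack.package import *


   class Example(CMakePackage):
       """Hypothetical package showing dependency types."""

       # build-only: needed to compile, not linked in and not needed at runtime
       depends_on("cmake", type="build")

       # default ('build', 'link'): the common case for compiled languages
       depends_on("zlib")

       # run-only: needed when the installed package is used
       depends_on("python", type="run")

       # test-only: does not affect the package hash
       depends_on("py-pytest", type="test")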
@@ -2634,9 +2636,12 @@ extendable package:

   extends('python')
   ...

-Now, the ``py-numpy`` package can be used as an argument to ``spack
-activate``. When it is activated, all the files in its prefix will be
-symbolically linked into the prefix of the python package.
+This accomplishes a few things. Firstly, the Python package can set special
+variables such as ``PYTHONPATH`` for all extensions when the run or build
+environment is set up. Secondly, filesystem views can ensure that extensions
+are put in the same prefix as their extendee. This ensures that Python in
+a view can always locate its Python packages, even without environment
+variables set.

A package can only extend one other package at a time. To support packages
that may extend one of a list of other packages, Spack supports multiple
@@ -2684,9 +2689,8 @@ variant(s) are selected. This may be accomplished with conditional

   ...

Sometimes, certain files in one package will conflict with those in
-another, which means they cannot both be activated (symlinked) at the
-same time. In this case, you can tell Spack to ignore those files
-when it does the activation:
+another, which means they cannot both be used in a view at the
+same time. In this case, you can tell Spack to ignore those files:

.. code-block:: python

@@ -2698,7 +2702,7 @@ when it does the activation:

   ...

The code above will prevent everything in the ``$prefix/bin/`` directory
-from being linked in at activation time.
+from being linked in a view.

.. note::

@@ -2722,67 +2726,6 @@ extensions; as a consequence python extension packages (those inheriting from
``PythonPackage``) likewise override ``add_files_to_view`` in order to rewrite
shebang lines which point to the Python interpreter.

^^^^^^^^^^^^^^^^^^^^^^^^^
Activation & deactivation
^^^^^^^^^^^^^^^^^^^^^^^^^

Adding an extension to a view is referred to as an activation. If the view is
maintained in the Spack installation prefix of the extendee this is called a
global activation. Activations may involve updating some centralized state
that is maintained by the extendee package, so there can be additional work
for adding extensions compared with non-extension packages.

Spack's ``Package`` class has default ``activate`` and ``deactivate``
implementations that handle symbolically linking extensions' prefixes
into a specified view. Extendable packages can override these methods
to add custom activate/deactivate logic of their own. For example,
the ``activate`` and ``deactivate`` methods in the Python class handle
symbolic linking of extensions, but they also handle details surrounding
Python's ``.pth`` files, and other aspects of Python packaging.

Spack's extensions mechanism is designed to be extensible, so that
other packages (like Ruby, R, Perl, etc.) can provide their own
custom extension management logic, as they may not handle modules the
same way that Python does.

Let's look at Python's activate function:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.activate
   :linenos:

This function is called on the *extendee* (Python). It first calls
``activate`` in the superclass, which handles symlinking the
extension package's prefix into the specified view. It then does
some special handling of the ``easy-install.pth`` file, part of
Python's setuptools.

Deactivate behaves similarly to activate, but it unlinks files:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.deactivate
   :linenos:

Both of these methods call some custom functions in the Python
package. See the source for Spack's Python package for details.

^^^^^^^^^^^^^^^^^^^^
Activation arguments
^^^^^^^^^^^^^^^^^^^^

You may have noticed that the ``activate`` function defined above
takes keyword arguments. These are the keyword arguments from
``extends()``, and they are passed to both activate and deactivate.

This capability allows an extension to customize its own activation by
passing arguments to the extendee. Extendees can likewise implement
custom ``activate()`` and ``deactivate()`` functions to suit their
needs.

The only keyword argument supported by default is the ``ignore``
argument, which can take a regex, list of regexes, or a predicate to
determine which files *not* to symlink during activation.

.. _virtual-dependencies:

--------------------
@@ -3584,7 +3527,7 @@ will likely contain some overriding of default builder methods:

       def cmake_args(self):
           pass

-   class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder):
+   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):

       def configure_args(self):
           pass
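For context, such builder classes sit alongside a package that declares several build systems. A rough sketch of that layout (abridged and hypothetical; refer to the packaging guide for the real template):

.. code-block:: python

   import spack.build_systems.autotools
   import spack.build_systems.cmake
   from spack.package import *


   class Example(CMakePackage, AutotoolsPackage):
       # Declare both build systems; cmake is tried by default.
       build_system("cmake", "autotools", default="cmake")


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           return []


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           return []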
@@ -184,13 +184,48 @@ simply run the following commands:

.. code-block:: console

   $ spack env activate myenv
-   $ spack concretize --force
+   $ spack concretize --fresh --force
   $ spack install

-The ``--force`` flag tells Spack to overwrite its previous concretization
-decisions, allowing you to choose a new version of Python. If any of the new
-packages like Bash are already installed, ``spack install`` won't re-install
-them, it will keep the symlinks in place.
+The ``--fresh`` flag tells Spack to use the latest version of every package
+where possible instead of trying to optimize for reuse of existing installed
+packages.
+
+The ``--force`` flag in addition tells Spack to overwrite its previous
+concretization decisions, allowing you to choose a new version of Python.
+If any of the new packages like Bash are already installed, ``spack install``
+won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

   $ spack env activate myenv
   $ spack mark -i --all
   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.

Don't worry, however: this will not remove your entire environment.
Running ``spack install`` will reexamine your spack environment after
a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

--------------
Uninstallation

@@ -1,5 +1,5 @@
 Name, Supported Versions, Notes, Requirement Reason
-Python, 2.7/3.6-3.11, , Interpreter for Spack
+Python, 3.6-3.11, , Interpreter for Spack
 C/C++ Compilers, , , Building software
 make, , , Build software
 patch, , , Build software

lib/spack/env/cc (vendored)
@@ -440,6 +440,47 @@ while [ $# -ne 0 ]; do
            continue
        fi

        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
            # NOTE: the eval is required to allow `|` alternatives inside the variable
            eval "\
                case \"\$1\" in
                    $SPACK_COMPILER_FLAGS_KEEP)
                        append other_args_list \"\$1\"
                        shift
                        continue
                        ;;
                esac
            "
        fi
        # the replace list is a space-separated list of pipe-separated pairs,
        # the first in each pair is the original prefix to be matched, the
        # second is the replacement prefix
        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
                before=${rep%|*}
                after=${rep#*|}
                eval "\
                    stripped=\"\${1##$before}\"
                "
                if [ "$stripped" = "$1" ] ; then
                    continue
                fi

                replaced="$after$stripped"

                # it matched, remove it
                shift

                if [ -z "$replaced" ] ; then
                    # completely removed, continue OUTER loop
                    continue 2
                fi

                # re-build argument list with replacement
                set -- "$replaced" "$@"
            done
        fi

        case "$1" in
        -isystem*)
            arg="${1#-isystem}"
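The comment above defines the replace list as space-separated, pipe-separated prefix pairs. A rough Python rendering of that parsing and rewriting logic, for illustration only (the sample flags are made up; the wrapper itself is the shell code above):

.. code-block:: python

   def replace_flag_prefixes(args, replace_spec):
       """Apply 'before|after' prefix replacements to a list of compiler flags.

       replace_spec is a space-separated list of pipe-separated pairs, e.g.
       "-O3|-O2 -Werror|" (an empty replacement drops the flag entirely).
       """
       pairs = [rep.split("|", 1) for rep in replace_spec.split()]
       result = []
       for arg in args:
           keep = True
           for before, after in pairs:
               if arg.startswith(before):
                   arg = after + arg[len(before):]
                   if not arg:  # completely removed: drop this argument
                       keep = False
                       break
           if keep:
               result.append(arg)
       return result


   # Hypothetical usage: rewrite -O3 to -O2 and drop -Werror entirely.
   print(replace_flag_prefixes(["-O3", "-Werror", "-fPIC"], "-O3|-O2 -Werror|"))
   # -> ['-O2', '-fPIC']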
lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)
+* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)

 argparse
 --------

lib/spack/external/archspec/__init__.py (vendored)
@@ -1,2 +1,2 @@
 """Init file to avoid namespace packages"""
-__version__ = "0.1.2"
+__version__ = "0.2.0"

lib/spack/external/archspec/cpu/alias.py (vendored)
@@ -3,13 +3,12 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Aliases for microarchitecture features."""
-# pylint: disable=useless-object-inheritance
 from .schema import TARGETS_JSON, LazyDictionary
 
 _FEATURE_ALIAS_PREDICATE = {}
 
 
-class FeatureAliasTest(object):
+class FeatureAliasTest:
     """A test that must be passed for a feature alias to succeed.
 
     Args:
@@ -48,7 +47,7 @@ def alias_predicate(func):
 
     # Check we didn't register anything else with the same name
     if name in _FEATURE_ALIAS_PREDICATE:
-        msg = 'the alias predicate "{0}" already exists'.format(name)
+        msg = f'the alias predicate "{name}" already exists'
         raise KeyError(msg)
 
     _FEATURE_ALIAS_PREDICATE[name] = func
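The guard above implements a simple write-once registry keyed by predicate name. A generic sketch of the same pattern (the names here are illustrative, not archspec's API):

_REGISTRY = {}

def register(func):
    """Register func under its own name, refusing duplicate registrations."""
    name = func.__name__
    if name in _REGISTRY:
        raise KeyError(f'the alias predicate "{name}" already exists')
    _REGISTRY[name] = func
    return func

@register
def any_of(items):
    return bool(items)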
11 lib/spack/external/archspec/cpu/detect.py (vendored)
@@ -11,8 +11,6 @@
 import subprocess
 import warnings
 
-import six
-
 from .microarchitecture import generic_microarchitecture, TARGETS
 from .schema import TARGETS_JSON
 
@@ -80,10 +78,9 @@ def proc_cpuinfo():
 
 
 def _check_output(args, env):
-    output = subprocess.Popen(  # pylint: disable=consider-using-with
-        args, stdout=subprocess.PIPE, env=env
-    ).communicate()[0]
-    return six.text_type(output.decode("utf-8"))
+    with subprocess.Popen(args, stdout=subprocess.PIPE, env=env) as proc:
+        output = proc.communicate()[0]
+    return str(output.decode("utf-8"))
 
 
 def _machine():
@@ -273,7 +270,7 @@ def compatibility_check(architecture_family):
         this test can be used, e.g. x86_64 or ppc64le etc.
     """
     # Turn the argument into something iterable
-    if isinstance(architecture_family, six.string_types):
+    if isinstance(architecture_family, str):
         architecture_family = (architecture_family,)
 
     def decorator(func):
lib/spack/external/archspec/cpu/microarchitecture.py (vendored)
@@ -5,14 +5,11 @@
 """Types and functions to manage information
 on CPU microarchitectures.
 """
-# pylint: disable=useless-object-inheritance
 import functools
 import platform
 import re
 import warnings
 
-import six
-
 import archspec
 import archspec.cpu.alias
 import archspec.cpu.schema
@@ -27,7 +24,7 @@ def coerce_target_names(func):
 
     @functools.wraps(func)
     def _impl(self, other):
-        if isinstance(other, six.string_types):
+        if isinstance(other, str):
             if other not in TARGETS:
                 msg = '"{0}" is not a valid target name'
                 raise ValueError(msg.format(other))
@@ -38,7 +35,7 @@ def _impl(self, other):
     return _impl
 
 
-class Microarchitecture(object):
+class Microarchitecture:
     """Represents a specific CPU micro-architecture.
 
     Args:
@@ -150,7 +147,7 @@ def __str__(self):
 
     def __contains__(self, feature):
         # Feature must be of a string type, so be defensive about that
-        if not isinstance(feature, six.string_types):
+        if not isinstance(feature, str):
             msg = "only objects of string types are accepted [got {0}]"
             raise TypeError(msg.format(str(type(feature))))
 
@@ -168,7 +165,7 @@ def family(self):
         """Returns the architecture family a given target belongs to"""
         roots = [x for x in [self] + self.ancestors if not x.ancestors]
         msg = "a target is expected to belong to just one architecture family"
-        msg += "[found {0}]".format(", ".join(str(x) for x in roots))
+        msg += f"[found {', '.join(str(x) for x in roots)}]"
         assert len(roots) == 1, msg
 
         return roots.pop()
@@ -318,9 +315,6 @@ def _known_microarchitectures():
     """Returns a dictionary of the known micro-architectures. If the
     current host platform is unknown adds it too as a generic target.
     """
-    # pylint: disable=fixme
-    # TODO: Simplify this logic using object_pairs_hook to OrderedDict
-    # TODO: when we stop supporting python2.6
 
     def fill_target_from_dict(name, data, targets):
         """Recursively fills targets by adding the micro-architecture
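The ``coerce_target_names`` decorator changed above is what lets comparison operators accept plain strings. A self-contained sketch of the idea (``TARGETS`` here is a stand-in for archspec's real registry):

import functools

TARGETS = {}  # name -> Microarchitecture, populated from the JSON data

def coerce_target_names(func):
    """Turn a string operand into the corresponding target object."""
    @functools.wraps(func)
    def _impl(self, other):
        if isinstance(other, str):
            if other not in TARGETS:
                raise ValueError(f'"{other}" is not a valid target name')
            other = TARGETS[other]
        return func(self, other)
    return _impl

# With this in place, callers may write `target < "icelake"` and the string
# is resolved to a Microarchitecture before the real comparison runs.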
10 lib/spack/external/archspec/cpu/schema.py (vendored)
@@ -5,16 +5,12 @@
 """Global objects with the content of the microarchitecture
 JSON file and its schema
 """
+import collections.abc
 import json
 import os.path
 
-try:
-    from collections.abc import MutableMapping  # novm
-except ImportError:
-    from collections import MutableMapping  # pylint: disable=deprecated-class
-
 
-class LazyDictionary(MutableMapping):
+class LazyDictionary(collections.abc.MutableMapping):
     """Lazy dictionary that gets constructed on first access to any object key
 
     Args:
@@ -56,7 +52,7 @@ def _load_json_file(json_file):
 
     def _factory():
         filename = os.path.join(json_dir, json_file)
-        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
+        with open(filename, "r", encoding="utf-8") as file:
            return json.load(file)
 
     return _factory
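``LazyDictionary`` defers loading the (large) microarchitecture JSON until first use. A minimal sketch of such a lazy mapping, assuming a zero-argument factory (the real class may differ in detail):

import collections.abc

class LazyDictionary(collections.abc.MutableMapping):
    """Mapping whose underlying data is built on first access."""

    def __init__(self, factory):
        self.factory = factory
        self._data = None

    @property
    def data(self):
        if self._data is None:
            self._data = self.factory()  # expensive load happens once, lazily
        return self._data

    def __getitem__(self, key):
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __len__(self):
        return len(self.data)

# e.g. TARGETS_JSON = LazyDictionary(_load_json_file("microarchitectures.json")),
# since _load_json_file above returns a factory callable.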
lib/spack/external/archspec/json/cpu/microarchitectures.json (vendored)
@@ -961,21 +961,21 @@
       ],
       "intel": [
         {
-          "versions": "18.0:",
+          "versions": "18.0:2021.2",
           "name": "knl",
           "flags": "-march={name} -mtune={name}"
         }
       ],
       "oneapi": [
         {
-          "versions": ":",
+          "versions": ":2021.2",
           "name": "knl",
           "flags": "-march={name} -mtune={name}"
         }
       ],
       "dpcpp": [
         {
-          "versions": ":",
+          "versions": ":2021.2",
           "name": "knl",
           "flags": "-march={name} -mtune={name}"
         }
@@ -1905,6 +1905,86 @@
       ]
     }
   },
+  "zen4": {
+    "from": ["zen3", "x86_64_v4"],
+    "vendor": "AuthenticAMD",
+    "features": [
+      "bmi1",
+      "bmi2",
+      "f16c",
+      "fma",
+      "fsgsbase",
+      "avx",
+      "avx2",
+      "rdseed",
+      "clzero",
+      "aes",
+      "pclmulqdq",
+      "cx16",
+      "movbe",
+      "mmx",
+      "sse",
+      "sse2",
+      "sse4a",
+      "ssse3",
+      "sse4_1",
+      "sse4_2",
+      "abm",
+      "xsavec",
+      "xsaveopt",
+      "clflushopt",
+      "popcnt",
+      "clwb",
+      "vaes",
+      "vpclmulqdq",
+      "pku",
+      "gfni",
+      "flush_l1d",
+      "erms",
+      "avic",
+      "avx512f",
+      "avx512dq",
+      "avx512ifma",
+      "avx512cd",
+      "avx512bw",
+      "avx512vl",
+      "avx512_bf16",
+      "avx512vbmi",
+      "avx512_vbmi2",
+      "avx512_vnni",
+      "avx512_bitalg",
+      "avx512_vpopcntdq"
+    ],
+    "compilers": {
+      "gcc": [
+        {
+          "versions": "10.3:",
+          "name": "znver3",
+          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
+        }
+      ],
+      "clang": [
+        {
+          "versions": "12.0:",
+          "name": "znver3",
+          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
+        }
+      ],
+      "aocc": [
+        {
+          "versions": "3.0:3.9",
+          "name": "znver3",
+          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg",
+          "warnings": "Zen4 processors are not fully supported by AOCC versions < 4.0. For optimal performance please upgrade to a newer version of AOCC"
+        },
+        {
+          "versions": "4.0:",
+          "name": "znver4",
+          "flags": "-march={name} -mtune={name}"
+        }
+      ]
+    }
+  },
   "ppc64": {
     "from": [],
     "vendor": "generic",
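Each compiler entry above maps a compiler version range (``lo:hi``, with either end open) to a target name and a flag template. A simplified sketch of how such an entry could be resolved (assumed semantics; archspec's real resolver also handles dotted and suffixed versions more carefully):

def _v(s):
    """Parse '10.3' into (10, 3) so ranges compare numerically."""
    return tuple(int(x) for x in s.split("."))

def optimization_flags(entries, version):
    """Pick the first entry whose 'lo:hi' range contains the compiler version."""
    for entry in entries:
        lo, _, hi = entry["versions"].partition(":")
        if (not lo or _v(lo) <= _v(version)) and (not hi or _v(version) <= _v(hi)):
            return entry["flags"].format(name=entry["name"])
    raise ValueError("no entry supports compiler version %s" % version)

aocc_zen4 = [
    {"versions": "3.0:3.9", "name": "znver3", "flags": "-march={name} -mtune={name}"},
    {"versions": "4.0:", "name": "znver4", "flags": "-march={name} -mtune={name}"},
]
print(optimization_flags(aocc_zen4, "4.1"))  # -march=znver4 -mtune=znver4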
@@ -2302,7 +2382,6 @@
         "fp",
         "asimd",
         "evtstrm",
         "pmull",
         "sha1",
         "sha2",
         "crc32",
7 lib/spack/external/ctest_log_parser.py (vendored)
@@ -71,13 +71,12 @@
 import re
 import math
 import multiprocessing
+import io
 import sys
 import threading
 import time
 from contextlib import contextmanager
 
-from six import StringIO
-from six import string_types
 
 _error_matches = [
     "^FAIL: ",
@@ -246,7 +245,7 @@ def __getitem__(self, line_no):
 
     def __str__(self):
         """Returns event lines and context."""
-        out = StringIO()
+        out = io.StringIO()
         for i in range(self.start, self.end):
             if i == self.line_no:
                 out.write(' >> %-6d%s' % (i, self[i]))
@@ -386,7 +385,7 @@ def parse(self, stream, context=6, jobs=None):
             (tuple): two lists containing ``BuildError`` and
                 ``BuildWarning`` objects.
         """
-        if isinstance(stream, string_types):
+        if isinstance(stream, str):
             with open(stream) as f:
                 return self.parse(f, context, jobs)
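The ``isinstance(stream, str)`` branch above is what lets ``parse()`` accept either an open stream or a file name. A minimal sketch of that dispatch pattern:

import io

def parse(stream):
    """Accept either an open stream or a path to a file (a string)."""
    if isinstance(stream, str):
        with open(stream) as f:  # a path: open it and recurse
            return parse(f)
    return stream.read()         # a file-like object

print(parse(io.StringIO("hello")))  # streams work; a filename would too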
2392 lib/spack/external/py2/argparse.py (vendored)
(file diff suppressed because it is too large)
289 lib/spack/external/py2/functools32/LICENSE (vendored)
@@ -1,289 +0,0 @@
(file deleted: the standard PSF / BeOpen / CNRI / CWI Python license text and
release-history table that accompanied the vendored functools32 backport)
1 lib/spack/external/py2/functools32/__init__.py (vendored)
@@ -1 +0,0 @@
-from .functools32 import *
158 lib/spack/external/py2/functools32/_dummy_thread32.py (vendored)
@@ -1,158 +0,0 @@
"""Drop-in replacement for the thread module.

Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.

Suggested usage is::

    try:
        try:
            import _thread  # Python >= 3
        except:
            import thread as _thread  # Python < 3
    except ImportError:
        import _dummy_thread as _thread

"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
           'interrupt_main', 'LockType']

# A dummy value
TIMEOUT_MAX = 2**31

# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).

class error(Exception):
    """Dummy implementation of _thread.error."""

    def __init__(self, *args):
        self.args = args

def start_new_thread(function, args, kwargs={}):
    """Dummy implementation of _thread.start_new_thread().

    Compatibility is maintained by making sure that ``args`` is a
    tuple and ``kwargs`` is a dictionary. If an exception is raised
    and it is SystemExit (which can be done by _thread.exit()) it is
    caught and nothing is done; all other exceptions are printed out
    by using traceback.print_exc().

    If the executed function calls interrupt_main the KeyboardInterrupt will be
    raised when the function returns.

    """
    if type(args) != type(tuple()):
        raise TypeError("2nd arg must be a tuple")
    if type(kwargs) != type(dict()):
        raise TypeError("3rd arg must be a dict")
    global _main
    _main = False
    try:
        function(*args, **kwargs)
    except SystemExit:
        pass
    except:
        import traceback
        traceback.print_exc()
    _main = True
    global _interrupt
    if _interrupt:
        _interrupt = False
        raise KeyboardInterrupt

def exit():
    """Dummy implementation of _thread.exit()."""
    raise SystemExit

def get_ident():
    """Dummy implementation of _thread.get_ident().

    Since this module should only be used when _threadmodule is not
    available, it is safe to assume that the current process is the
    only thread. Thus a constant can be safely returned.
    """
    return -1

def allocate_lock():
    """Dummy implementation of _thread.allocate_lock()."""
    return LockType()

def stack_size(size=None):
    """Dummy implementation of _thread.stack_size()."""
    if size is not None:
        raise error("setting thread stack size not supported")
    return 0

class LockType(object):
    """Class implementing dummy implementation of _thread.LockType.

    Compatibility is maintained by maintaining self.locked_status
    which is a boolean that stores the state of the lock. Pickling of
    the lock, though, should not be done since if the _thread module is
    then used with an unpickled ``lock()`` from here problems could
    occur from this class not having atomic methods.

    """

    def __init__(self):
        self.locked_status = False

    def acquire(self, waitflag=None, timeout=-1):
        """Dummy implementation of acquire().

        For blocking calls, self.locked_status is automatically set to
        True and returned appropriately based on value of
        ``waitflag``. If it is non-blocking, then the value is
        actually checked and not set if it is already acquired. This
        is all done so that threading.Condition's assert statements
        aren't triggered and throw a little fit.

        """
        if waitflag is None or waitflag:
            self.locked_status = True
            return True
        else:
            if not self.locked_status:
                self.locked_status = True
                return True
            else:
                if timeout > 0:
                    import time
                    time.sleep(timeout)
                return False

    __enter__ = acquire

    def __exit__(self, typ, val, tb):
        self.release()

    def release(self):
        """Release the dummy lock."""
        # XXX Perhaps shouldn't actually bother to test? Could lead
        # to problems for complex, threaded code.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        return self.locked_status

# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True

def interrupt_main():
    """Set _interrupt flag to True to have start_new_thread raise
    KeyboardInterrupt upon exiting."""
    if _main:
        raise KeyboardInterrupt
    else:
        global _interrupt
        _interrupt = True
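Although it is being deleted here, the shim is a nice illustration of duck-typing a lock: ``__enter__`` is aliased to ``acquire``, so the dummy lock still works as a context manager. Illustrative usage (against the module above, before its removal):

lock = allocate_lock()   # returns a LockType instance
with lock:               # __enter__ = acquire sets locked_status
    assert lock.locked()
assert not lock.locked()  # __exit__ released it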
423 lib/spack/external/py2/functools32/functools32.py (vendored)
@@ -1,423 +0,0 @@
"""functools.py - Tools for working with functions and callable objects
"""
# Python module wrapper for _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>
# and Raymond Hettinger <python at rcn.com>
# Copyright (C) 2006-2010 Python Software Foundation.
# See C source code for _functools credits/copyright

__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
           'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']

from _functools import partial, reduce
from collections import MutableMapping, namedtuple
from .reprlib32 import recursive_repr as _recursive_repr
from weakref import proxy as _proxy
import sys as _sys
try:
    from thread import allocate_lock as Lock
except ImportError:
    from ._dummy_thread32 import allocate_lock as Lock

################################################################################
### OrderedDict
################################################################################

class _Link(object):
    __slots__ = 'prev', 'next', 'key', '__weakref__'

class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            root.prev = proxy(link)
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev

    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last==False).

        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).

        '''
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            last = root.prev
            link.prev = last
            link.next = root
            last.next = root.prev = link
        else:
            first = root.next
            link.prev = root
            link.next = first
            root.next = first.prev = link

    def __sizeof__(self):
        sizeof = _sys.getsizeof
        n = len(self) + 1                    # number of links including root
        size = sizeof(self.__dict__)         # instance dictionary
        size += sizeof(self.__map) * 2       # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n  # link objects
        size += sizeof(self.__root) * n      # proxy objects
        return size

    update = __update = MutableMapping.update
    keys = MutableMapping.keys
    values = MutableMapping.values
    items = MutableMapping.items
    __ne__ = MutableMapping.__ne__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value. If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    @_recursive_repr()
    def __repr__(self):
        'od.__repr__() <==> repr(od)'
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
        If not specified, the value defaults to None.

        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)

# update_wrapper() and wraps() are tools to help write
# wrapper functions that can handle naive introspection

WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
    """Update a wrapper function to look like the wrapped function

       wrapper is the function to be updated
       wrapped is the original function
       assigned is a tuple naming the attributes assigned directly
       from the wrapped function to the wrapper function (defaults to
       functools.WRAPPER_ASSIGNMENTS)
       updated is a tuple naming the attributes of the wrapper that
       are updated with the corresponding attribute from the wrapped
       function (defaults to functools.WRAPPER_UPDATES)
    """
    wrapper.__wrapped__ = wrapped
    for attr in assigned:
        try:
            value = getattr(wrapped, attr)
        except AttributeError:
            pass
        else:
            setattr(wrapper, attr, value)
    for attr in updated:
        getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
    # Return the wrapper so this can be used as a decorator via partial()
    return wrapper

def wraps(wrapped,
          assigned = WRAPPER_ASSIGNMENTS,
          updated = WRAPPER_UPDATES):
    """Decorator factory to apply update_wrapper() to a wrapper function

       Returns a decorator that invokes update_wrapper() with the decorated
       function as the wrapper argument and the arguments to wraps() as the
       remaining arguments. Default arguments are as for update_wrapper().
       This is a convenience function to simplify applying partial() to
       update_wrapper().
    """
    return partial(update_wrapper, wrapped=wrapped,
                   assigned=assigned, updated=updated)

def total_ordering(cls):
    """Class decorator that fills in missing ordering methods"""
    convert = {
        '__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
                   ('__le__', lambda self, other: self < other or self == other),
                   ('__ge__', lambda self, other: not self < other)],
        '__le__': [('__ge__', lambda self, other: not self <= other or self == other),
                   ('__lt__', lambda self, other: self <= other and not self == other),
                   ('__gt__', lambda self, other: not self <= other)],
        '__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
                   ('__ge__', lambda self, other: self > other or self == other),
                   ('__le__', lambda self, other: not self > other)],
        '__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
                   ('__gt__', lambda self, other: self >= other and not self == other),
                   ('__lt__', lambda self, other: not self >= other)]
    }
    roots = set(dir(cls)) & set(convert)
    if not roots:
        raise ValueError('must define at least one ordering operation: < > <= >=')
    root = max(roots)  # prefer __lt__ to __le__ to __gt__ to __ge__
    for opname, opfunc in convert[root]:
        if opname not in roots:
            opfunc.__name__ = opname
            opfunc.__doc__ = getattr(int, opname).__doc__
            setattr(cls, opname, opfunc)
    return cls

def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function"""
    class K(object):
        __slots__ = ['obj']
        def __init__(self, obj):
            self.obj = obj
        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0
        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0
        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0
        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0
        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0
        def __ne__(self, other):
            return mycmp(self.obj, other.obj) != 0
        __hash__ = None
    return K

_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")

def lru_cache(maxsize=100):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    """
    # Users should only access the lru_cache through its public API:
    #     cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    def decorating_function(user_function,
                tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):

        hits, misses = [0], [0]
        kwd_mark = (object(),)  # separates positional and keyword args
        lock = Lock()           # needed because OrderedDict isn't threadsafe

        if maxsize is None:
            cache = dict()      # simple cache without ordering or size limit

            @wraps(user_function)
            def wrapper(*args, **kwds):
                key = args
                if kwds:
                    key += kwd_mark + tuple(sorted(kwds.items()))
                try:
                    result = cache[key]
                    hits[0] += 1
                    return result
                except KeyError:
                    pass
                result = user_function(*args, **kwds)
                cache[key] = result
                misses[0] += 1
                return result
        else:
            cache = OrderedDict()  # ordered least recent to most recent
            cache_popitem = cache.popitem
            cache_renew = cache.move_to_end

            @wraps(user_function)
            def wrapper(*args, **kwds):
                key = args
                if kwds:
                    key += kwd_mark + tuple(sorted(kwds.items()))
                with lock:
                    try:
                        result = cache[key]
                        cache_renew(key)  # record recent use of this key
                        hits[0] += 1
                        return result
                    except KeyError:
                        pass
                result = user_function(*args, **kwds)
                with lock:
                    cache[key] = result  # record recent use of this key
                    misses[0] += 1
                    if len(cache) > maxsize:
                        cache_popitem(0)  # purge least recently used cache entry
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(hits[0], misses[0], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                hits[0] = misses[0] = 0

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return wrapper

    return decorating_function
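For reference, the public API of the backport being removed matches the modern ``functools.lru_cache``, which Spack can now use directly on Python 3. A short usage example against the standard library:

import functools

@functools.lru_cache(maxsize=32)
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(16)
print(fib.cache_info())  # CacheInfo(hits=14, misses=17, maxsize=32, currsize=17)
fib.cache_clear()        # reset both the cache and its statistics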
157 lib/spack/external/py2/functools32/reprlib32.py (vendored)
@@ -1,157 +0,0 @@
"""Redo the builtin repr() (representation) but with limits on most sizes."""

__all__ = ["Repr", "repr", "recursive_repr"]

import __builtin__ as builtins
from itertools import islice
try:
    from thread import get_ident
except ImportError:
    from _dummy_thread32 import get_ident

def recursive_repr(fillvalue='...'):
    'Decorator to make a repr function return fillvalue for a recursive call'

    def decorating_function(user_function):
        repr_running = set()

        def wrapper(self):
            key = id(self), get_ident()
            if key in repr_running:
                return fillvalue
            repr_running.add(key)
            try:
                result = user_function(self)
            finally:
                repr_running.discard(key)
            return result

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function

class Repr:

    def __init__(self):
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30

    def repr(self, x):
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        typename = type(x).__name__
        if ' ' in typename:
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        n = len(x)
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter: pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail: right = trail + right
        return '%s%s%s' % (left, s, right)

    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)

    def repr_frozenset(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)

    def repr_str(self, x, level):
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_int(self, x, level):
        s = builtins.repr(x)  # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
            # Bugs in x.__repr__() can cause arbitrary
            # exceptions -- then make up something
        except Exception:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s


def _possibly_sorted(x):
    # Since not all sequences of items can be sorted and comparison
    # functions may raise arbitrary exceptions, return an unsorted
    # sequence in that case.
    try:
        return sorted(x)
    except Exception:
        return list(x)

aRepr = Repr()
repr = aRepr.repr
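``recursive_repr`` (now available as ``reprlib.recursive_repr`` in the Python 3 standard library) guards ``__repr__`` against self-referential structures. A small usage example against the standard library:

from reprlib import recursive_repr

class Node:
    def __init__(self):
        self.next = self  # deliberately self-referential

    @recursive_repr()
    def __repr__(self):
        return "Node(next=%r)" % self.next

print(repr(Node()))  # prints Node(next=...) instead of recursing forever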
103 lib/spack/external/py2/typing.py (vendored)
@@ -1,103 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""
|
||||
This is a fake set of symbols to allow spack to import typing in python
|
||||
versions where we do not support type checking (<3)
|
||||
"""
|
||||
from collections import defaultdict
|
||||
|
||||
# (1) Unparameterized types.
|
||||
Annotated = object
|
||||
Any = object
|
||||
AnyStr = object
|
||||
ByteString = object
|
||||
Counter = object
|
||||
Final = object
|
||||
Hashable = object
|
||||
NoReturn = object
|
||||
Sized = object
|
||||
SupportsAbs = object
|
||||
SupportsBytes = object
|
||||
SupportsComplex = object
|
||||
SupportsFloat = object
|
||||
SupportsIndex = object
|
||||
SupportsInt = object
|
||||
SupportsRound = object
|
||||
|
||||
# (2) Parameterized types.
|
||||
AbstractSet = defaultdict(lambda: object)
|
||||
AsyncContextManager = defaultdict(lambda: object)
|
||||
AsyncGenerator = defaultdict(lambda: object)
|
||||
AsyncIterable = defaultdict(lambda: object)
|
||||
AsyncIterator = defaultdict(lambda: object)
|
||||
Awaitable = defaultdict(lambda: object)
|
||||
Callable = defaultdict(lambda: object)
|
||||
ChainMap = defaultdict(lambda: object)
|
||||
ClassVar = defaultdict(lambda: object)
|
||||
Collection = defaultdict(lambda: object)
|
||||
Container = defaultdict(lambda: object)
|
||||
ContextManager = defaultdict(lambda: object)
|
||||
Coroutine = defaultdict(lambda: object)
|
||||
DefaultDict = defaultdict(lambda: object)
|
||||
Deque = defaultdict(lambda: object)
|
||||
Dict = defaultdict(lambda: object)
|
||||
ForwardRef = defaultdict(lambda: object)
|
||||
FrozenSet = defaultdict(lambda: object)
|
||||
Generator = defaultdict(lambda: object)
|
||||
Generic = defaultdict(lambda: object)
|
||||
ItemsView = defaultdict(lambda: object)
|
||||
Iterable = defaultdict(lambda: object)
|
||||
Iterator = defaultdict(lambda: object)
|
||||
KeysView = defaultdict(lambda: object)
|
||||
List = defaultdict(lambda: object)
|
||||
Literal = defaultdict(lambda: object)
|
||||
Mapping = defaultdict(lambda: object)
|
||||
MappingView = defaultdict(lambda: object)
|
||||
MutableMapping = defaultdict(lambda: object)
|
||||
MutableSequence = defaultdict(lambda: object)
|
||||
MutableSet = defaultdict(lambda: object)
|
||||
NamedTuple = defaultdict(lambda: object)
|
||||
Optional = defaultdict(lambda: object)
|
||||
OrderedDict = defaultdict(lambda: object)
|
||||
Reversible = defaultdict(lambda: object)
|
||||
Sequence = defaultdict(lambda: object)
|
||||
Set = defaultdict(lambda: object)
|
||||
Tuple = defaultdict(lambda: object)
|
||||
Type = defaultdict(lambda: object)
|
||||
TypedDict = defaultdict(lambda: object)
|
||||
Union = defaultdict(lambda: object)
|
||||
ValuesView = defaultdict(lambda: object)
|
||||
|
||||
# (3) Type variable declarations.
|
||||
TypeVar = lambda *args, **kwargs: None
|
||||
|
||||
# (4) Functions.
|
||||
cast = lambda _type, x: x
|
||||
get_args = None
|
||||
get_origin = None
|
||||
get_type_hints = None
|
||||
no_type_check = None
|
||||
no_type_check_decorator = None
|
||||
|
||||
## typing_extensions
|
||||
# We get a ModuleNotFoundError when attempting to import anything from typing_extensions
|
||||
# if we separate this into a separate typing_extensions.py file for some reason.
|
||||
|
||||
# (1) Unparameterized types.
|
||||
IntVar = object
|
||||
Literal = object
|
||||
NewType = object
|
||||
Text = object
|
||||
|
||||
# (2) Parameterized types.
|
||||
Protocol = defaultdict(lambda: object)
|
||||
|
||||
# (3) Macro for avoiding evaluation except during type checking.
|
||||
TYPE_CHECKING = False
|
||||
|
||||
# (4) Decorators.
|
||||
final = lambda x: x
|
||||
overload = lambda x: x
|
||||
runtime_checkable = lambda x: x
|
||||
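A hypothetical illustration of why the shim existed: it let modules import ``typing`` names unconditionally on Python 2, where annotations lived in type comments and the imported symbols were never actually used at runtime:

from typing import Dict, Optional  # resolves to the fake symbols on Python 2

def lookup(table, key):
    # type: (Dict[str, int], str) -> Optional[int]
    return table.get(key)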
lib/spack/llnl/util/argparsewriter.py
@@ -7,11 +7,10 @@
 
 import argparse
 import errno
+import io
 import re
 import sys
 
-from six import StringIO
-
 
 class Command(object):
     """Parsed representation of a command from argparse.
@@ -181,7 +180,7 @@ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
         self.rst_levels = rst_levels
 
     def format(self, cmd):
-        string = StringIO()
+        string = io.StringIO()
        string.write(self.begin_command(cmd.prog))
 
        if cmd.description:
@@ -1,39 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
# isort: off
|
||||
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3,):
|
||||
from itertools import ifilter as filter
|
||||
from itertools import imap as map
|
||||
from itertools import izip as zip
|
||||
from itertools import izip_longest as zip_longest # novm
|
||||
from urllib import urlencode as urlencode
|
||||
from urllib import urlopen as urlopen
|
||||
else:
|
||||
filter = filter
|
||||
map = map
|
||||
zip = zip
|
||||
from itertools import zip_longest as zip_longest # novm # noqa: F401
|
||||
from urllib.parse import urlencode as urlencode # novm # noqa: F401
|
||||
from urllib.request import urlopen as urlopen # novm # noqa: F401
|
||||
|
||||
if sys.version_info >= (3, 3):
|
||||
from collections.abc import Hashable as Hashable # novm
|
||||
from collections.abc import Iterable as Iterable # novm
|
||||
from collections.abc import Mapping as Mapping # novm
|
||||
from collections.abc import MutableMapping as MutableMapping # novm
|
||||
from collections.abc import MutableSequence as MutableSequence # novm
|
||||
from collections.abc import MutableSet as MutableSet # novm
|
||||
from collections.abc import Sequence as Sequence # novm
|
||||
else:
|
||||
from collections import Hashable as Hashable # noqa: F401
|
||||
from collections import Iterable as Iterable # noqa: F401
|
||||
from collections import Mapping as Mapping # noqa: F401
|
||||
from collections import MutableMapping as MutableMapping # noqa: F401
|
||||
from collections import MutableSequence as MutableSequence # noqa: F401
|
||||
from collections import MutableSet as MutableSet # noqa: F401
|
||||
from collections import Sequence as Sequence # noqa: F401
|
||||
lib/spack/llnl/util/envmod.py (new file, 982 lines)
@@ -0,0 +1,982 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utilities for setting and modifying environment variables."""
import collections
import contextlib
import inspect
import json
import os
import os.path
import re
import shlex
import sys

import llnl.util.executable as executable
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.path import path_to_os_path

is_windows = sys.platform == "win32"

system_paths = (
    ["/", "/usr", "/usr/local"]
    if not is_windows
    else ["C:\\", "C:\\Program Files", "C:\\Program Files (x86)", "C:\\Users", "C:\\ProgramData"]
)
suffixes = ["bin", "bin64", "include", "lib", "lib64"] if not is_windows else []
system_dirs = [os.path.join(p, s) for s in suffixes for p in system_paths] + system_paths


def is_system_path(path):
    """Predicate that given a path returns True if it is a system path,
    False otherwise.

    Args:
        path (str): path to a directory

    Returns:
        True or False
    """
    return path and os.path.normpath(path) in system_dirs


def filter_system_paths(paths):
    """Return only paths that are not system paths."""
    return [p for p in paths if not is_system_path(p)]


def deprioritize_system_paths(paths):
    """Put system paths at the end of paths, otherwise preserving order."""
    filtered_paths = filter_system_paths(paths)
    fp = set(filtered_paths)
    return filtered_paths + [p for p in paths if p not in fp]
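# Illustrative sketch of the two helpers above, assuming a POSIX layout
# (paths below are hypothetical):
#
#   paths = ["/usr/lib64", "/opt/pkg/lib", "/usr", "/opt/pkg/bin"]
#   filter_system_paths(paths)        # -> ["/opt/pkg/lib", "/opt/pkg/bin"]
#   deprioritize_system_paths(paths)  # -> ["/opt/pkg/lib", "/opt/pkg/bin",
#                                     #     "/usr/lib64", "/usr"]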

_shell_set_strings = {
    "sh": "export {0}={1};\n",
    "csh": "setenv {0} {1};\n",
    "fish": "set -gx {0} {1};\n",
    "bat": 'set "{0}={1}"\n',
}


_shell_unset_strings = {
    "sh": "unset {0};\n",
    "csh": "unsetenv {0};\n",
    "fish": "set -e {0};\n",
    "bat": 'set "{0}="\n',
}
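# Illustrative sketch: these templates are what shell_modifications() below
# formats to emit shell-specific code (values are hypothetical):
#
#   _shell_set_strings["sh"].format("CC", "/usr/bin/gcc")
#   # -> 'export CC=/usr/bin/gcc;\n'
#   _shell_unset_strings["fish"].format("CC")
#   # -> 'set -e CC;\n'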

tracing_enabled = False


def prune_duplicate_paths(paths):
    """Returns the paths with duplicates removed, order preserved."""
    return list(llnl.util.lang.dedupe(paths))


def get_path(name):
    path = os.environ.get(name, "").strip()
    if path:
        return path.split(os.pathsep)
    else:
        return []


def env_flag(name):
    if name in os.environ:
        value = os.environ[name].lower()
        return value == "true" or value == "1"
    return False


def path_set(var_name, directories):
    path_str = os.pathsep.join(str(dir) for dir in directories)
    os.environ[var_name] = path_str


def path_put_first(var_name, directories):
    """Puts the provided directories first in the path, adding them
    if they're not already there.
    """

    path = os.environ.get(var_name, "").split(os.pathsep)

    for dir in directories:
        if dir in path:
            path.remove(dir)

    new_path = tuple(directories) + tuple(path)
    path_set(var_name, new_path)


@contextlib.contextmanager
def set_env(**kwargs):
    """Temporarily sets and restores environment variables.

    Variables can be set as keyword arguments to this function.
    """
    saved = {}
    for var, value in kwargs.items():
        if var in os.environ:
            saved[var] = os.environ[var]

        if value is None:
            if var in os.environ:
                del os.environ[var]
        else:
            os.environ[var] = value

    yield

    for var, value in kwargs.items():
        if var in saved:
            os.environ[var] = saved[var]
        else:
            if var in os.environ:
                del os.environ[var]
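# Illustrative usage sketch ("FOO" is a hypothetical variable): values are
# set (or, with None, unset) inside the block and restored afterwards.
#
#   with set_env(FOO="bar", TMPDIR=None):
#       assert os.environ["FOO"] == "bar"   # set for the duration
#       assert "TMPDIR" not in os.environ   # None means "unset"
#   # on exit, both variables are back to their previous state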

class NameModifier(object):
    def __init__(self, name, **kwargs):
        self.name = name
        self.separator = kwargs.get("separator", os.pathsep)
        self.args = {"name": name, "separator": self.separator}

        self.args.update(kwargs)

    def __eq__(self, other):
        if not isinstance(other, NameModifier):
            return False
        return self.name == other.name

    def update_args(self, **kwargs):
        self.__dict__.update(kwargs)
        self.args.update(kwargs)


class NameValueModifier(object):
    def __init__(self, name, value, **kwargs):
        self.name = name
        self.value = value
        self.separator = kwargs.get("separator", os.pathsep)
        self.args = {"name": name, "value": value, "separator": self.separator}
        self.args.update(kwargs)

    def __eq__(self, other):
        if not isinstance(other, NameValueModifier):
            return False
        return (
            self.name == other.name
            and self.value == other.value
            and self.separator == other.separator
        )

    def update_args(self, **kwargs):
        self.__dict__.update(kwargs)
        self.args.update(kwargs)


class SetEnv(NameValueModifier):
    def execute(self, env):
        tty.debug("SetEnv: {0}={1}".format(self.name, str(self.value)), level=3)
        env[self.name] = str(self.value)


class AppendFlagsEnv(NameValueModifier):
    def execute(self, env):
        tty.debug("AppendFlagsEnv: {0}={1}".format(self.name, str(self.value)), level=3)
        if self.name in env and env[self.name]:
            env[self.name] += self.separator + str(self.value)
        else:
            env[self.name] = str(self.value)


class UnsetEnv(NameModifier):
    def execute(self, env):
        tty.debug("UnsetEnv: {0}".format(self.name), level=3)
        # Avoid throwing if the variable was not set
        env.pop(self.name, None)


class RemoveFlagsEnv(NameValueModifier):
    def execute(self, env):
        tty.debug("RemoveFlagsEnv: {0}-{1}".format(self.name, str(self.value)), level=3)
        environment_value = env.get(self.name, "")
        flags = environment_value.split(self.separator) if environment_value else []
        flags = [f for f in flags if f != self.value]
        env[self.name] = self.separator.join(flags)


class SetPath(NameValueModifier):
    def execute(self, env):
        string_path = concatenate_paths(self.value, separator=self.separator)
        tty.debug("SetPath: {0}={1}".format(self.name, string_path), level=3)
        env[self.name] = string_path


class AppendPath(NameValueModifier):
    def execute(self, env):
        tty.debug("AppendPath: {0}+{1}".format(self.name, str(self.value)), level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories.append(path_to_os_path(os.path.normpath(self.value)).pop())
        env[self.name] = self.separator.join(directories)


class PrependPath(NameValueModifier):
    def execute(self, env):
        tty.debug("PrependPath: {0}+{1}".format(self.name, str(self.value)), level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories = [path_to_os_path(os.path.normpath(self.value)).pop()] + directories
        env[self.name] = self.separator.join(directories)


class RemovePath(NameValueModifier):
    def execute(self, env):
        tty.debug("RemovePath: {0}-{1}".format(self.name, str(self.value)), level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories = [
            path_to_os_path(os.path.normpath(x)).pop()
            for x in directories
            if x != path_to_os_path(os.path.normpath(self.value)).pop()
        ]
        env[self.name] = self.separator.join(directories)


class DeprioritizeSystemPaths(NameModifier):
    def execute(self, env):
        tty.debug("DeprioritizeSystemPaths: {0}".format(self.name), level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories = deprioritize_system_paths(
            [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
        )
        env[self.name] = self.separator.join(directories)


class PruneDuplicatePaths(NameModifier):
    def execute(self, env):
        tty.debug("PruneDuplicatePaths: {0}".format(self.name), level=3)
        environment_value = env.get(self.name, "")
        directories = environment_value.split(self.separator) if environment_value else []
        directories = prune_duplicate_paths(
            [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
        )
        env[self.name] = self.separator.join(directories)


class EnvironmentModifications(object):
    """Keeps track of requests to modify the current environment.

    Each call to a method to modify the environment stores the extra
    information on the caller in the request:

    * 'filename' : filename of the module where the caller is defined
    * 'lineno': line number where the request occurred
    * 'context' : line of code that issued the request that failed
    """

    def __init__(self, other=None, traced=None):
        """Initializes a new instance, copying commands from 'other'
        if it is not None.

        Args:
            other (EnvironmentModifications): list of environment modifications
                to be extended (optional)
            traced (bool): enable or disable stack trace inspection to log the origin
                of the environment modifications.
        """
        self.traced = tracing_enabled if traced is None else bool(traced)
        self.env_modifications = []
        if other is not None:
            self.extend(other)

    def __iter__(self):
        return iter(self.env_modifications)

    def __len__(self):
        return len(self.env_modifications)

    def extend(self, other):
        self._check_other(other)
        self.env_modifications.extend(other.env_modifications)

    @staticmethod
    def _check_other(other):
        if not isinstance(other, EnvironmentModifications):
            raise TypeError("other must be an instance of EnvironmentModifications")

    def _maybe_trace(self, kwargs):
        """Provide the modification with stack trace info so that we can track its
        origin to find issues in packages. This is very slow and expensive."""
        if not self.traced:
            return

        stack = inspect.stack()
        try:
            _, filename, lineno, _, context, index = stack[2]
            context = context[index].strip()
        except Exception:
            filename = "unknown file"
            lineno = "unknown line"
            context = "unknown context"
        kwargs.update({"filename": filename, "lineno": lineno, "context": context})

    def set(self, name, value, **kwargs):
        """Stores a request to set an environment variable.

        Args:
            name: name of the environment variable to be set
            value: value of the environment variable
        """
        self._maybe_trace(kwargs)
        item = SetEnv(name, value, **kwargs)
        self.env_modifications.append(item)

    def append_flags(self, name, value, sep=" ", **kwargs):
        """
        Stores in the current object a request to append to an env variable

        Args:
            name: name of the environment variable to be appended to
            value: value to append to the environment variable
        Appends with spaces separating different additions to the variable
        """
        self._maybe_trace(kwargs)
        kwargs.update({"separator": sep})
        item = AppendFlagsEnv(name, value, **kwargs)
        self.env_modifications.append(item)

    def unset(self, name, **kwargs):
        """Stores a request to unset an environment variable.

        Args:
            name: name of the environment variable to be unset
        """
        self._maybe_trace(kwargs)
        item = UnsetEnv(name, **kwargs)
        self.env_modifications.append(item)

    def remove_flags(self, name, value, sep=" ", **kwargs):
        """
        Stores in the current object a request to remove flags from an
        env variable

        Args:
            name: name of the environment variable to be removed from
            value: value to remove from the environment variable
            sep: separator to assume for environment variable
        """
        self._maybe_trace(kwargs)
        kwargs.update({"separator": sep})
        item = RemoveFlagsEnv(name, value, **kwargs)
        self.env_modifications.append(item)

    def set_path(self, name, elements, **kwargs):
        """Stores a request to set a path generated from a list.

        Args:
            name: name of the environment variable to be set.
            elements: elements of the path to set.
        """
        self._maybe_trace(kwargs)
        item = SetPath(name, elements, **kwargs)
        self.env_modifications.append(item)

    def append_path(self, name, path, **kwargs):
        """Stores a request to append a path to a path list.

        Args:
            name: name of the path list in the environment
            path: path to be appended
        """
        self._maybe_trace(kwargs)
        item = AppendPath(name, path, **kwargs)
        self.env_modifications.append(item)

    def prepend_path(self, name, path, **kwargs):
        """Same as `append_path`, but the path is pre-pended.

        Args:
            name: name of the path list in the environment
            path: path to be pre-pended
        """
        self._maybe_trace(kwargs)
        item = PrependPath(name, path, **kwargs)
        self.env_modifications.append(item)

    def remove_path(self, name, path, **kwargs):
        """Stores a request to remove a path from a path list.

        Args:
            name: name of the path list in the environment
            path: path to be removed
        """
        self._maybe_trace(kwargs)
        item = RemovePath(name, path, **kwargs)
        self.env_modifications.append(item)

    def deprioritize_system_paths(self, name, **kwargs):
        """Stores a request to deprioritize system paths in a path list,
        otherwise preserving the order.

        Args:
            name: name of the path list in the environment.
        """
        self._maybe_trace(kwargs)
        item = DeprioritizeSystemPaths(name, **kwargs)
        self.env_modifications.append(item)

    def prune_duplicate_paths(self, name, **kwargs):
        """Stores a request to remove duplicates from a path list, otherwise
        preserving the order.

        Args:
            name: name of the path list in the environment.
        """
        self._maybe_trace(kwargs)
        item = PruneDuplicatePaths(name, **kwargs)
        self.env_modifications.append(item)

    def group_by_name(self):
        """Returns a dict of the modifications grouped by variable name.

        Returns:
            dict mapping the environment variable name to the modifications to
            be done on it
        """
        modifications = collections.defaultdict(list)
        for item in self:
            modifications[item.name].append(item)
        return modifications

    def is_unset(self, var_name):
        modifications = self.group_by_name()
        var_updates = modifications.get(var_name, None)
        if not var_updates:
            # We did not explicitly unset it
            return False

        # The last modification must unset the variable for it to be considered
        # unset
        return type(var_updates[-1]) == UnsetEnv

    def clear(self):
        """
        Clears the current list of modifications
        """
        self.env_modifications = []

    def reversed(self):
        """
        Returns the EnvironmentModifications object that will reverse self

        Only creates reversals for additions to the environment, as reversing
        ``unset`` and ``remove_path`` modifications is impossible.

        Reversible operations are set(), prepend_path(), append_path(),
        set_path(), and append_flags().
        """
        rev = EnvironmentModifications()

        for envmod in reversed(self.env_modifications):
            if type(envmod) == SetEnv:
                tty.debug("Reversing `Set` environment operation may lose original value")
                rev.unset(envmod.name)
            elif type(envmod) == AppendPath:
                rev.remove_path(envmod.name, envmod.value)
            elif type(envmod) == PrependPath:
                rev.remove_path(envmod.name, envmod.value)
            elif type(envmod) == SetPath:
                tty.debug("Reversing `SetPath` environment operation may lose original value")
                rev.unset(envmod.name)
            elif type(envmod) == AppendFlagsEnv:
                rev.remove_flags(envmod.name, envmod.value)
            else:
                # This is an irreversible operation
                tty.warn(
                    "Skipping reversal of irreversible operation "
                    "%s %s" % (type(envmod), envmod.name)
                )

        return rev
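    # Illustrative sketch (hypothetical values): build modifications, then
    # compute a best-effort inverse.
    #
    #   env = EnvironmentModifications()
    #   env.set("FOO", "bar")
    #   env.prepend_path("PATH", "/opt/bin")
    #   undo = env.reversed()  # unset("FOO"), remove_path("PATH", "/opt/bin")
    #   env.apply_modifications()
    #   undo.apply_modifications()  # note: any previous value of FOO is lost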

    def apply_modifications(self, env=None):
        """Applies the modifications and clears the list."""
        # Use os.environ if not specified
        # Do not copy, we want to modify it in place
        if env is None:
            env = os.environ

        modifications = self.group_by_name()
        # Apply modifications one variable at a time
        for name, actions in sorted(modifications.items()):
            for x in actions:
                x.execute(env)

    def shell_modifications(self, shell="sh", explicit=False, env=None):
"""Return shell code to apply the modifications and clears the list."""
|
||||
        modifications = self.group_by_name()

        if env is None:
            env = os.environ

        new_env = env.copy()

        for name, actions in sorted(modifications.items()):
            for x in actions:
                x.execute(new_env)

        if "MANPATH" in new_env and not new_env.get("MANPATH").endswith(":"):
            new_env["MANPATH"] += ":"

        cmds = ""

        for name in sorted(set(modifications)):
            new = new_env.get(name, None)
            old = env.get(name, None)
            if explicit or new != old:
                if new is None:
                    cmds += _shell_unset_strings[shell].format(name)
                else:
                    if sys.platform != "win32":
                        cmd = _shell_set_strings[shell].format(name, shlex.quote(new_env[name]))
                    else:
                        cmd = _shell_set_strings[shell].format(name, new_env[name])
                    cmds += cmd
        return cmds
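    # Illustrative sketch (hypothetical value): render pending modifications
    # as shell code instead of mutating os.environ directly.
    #
    #   env = EnvironmentModifications()
    #   env.set("CC", "/usr/bin/gcc")
    #   env.shell_modifications(shell="sh")  # -> 'export CC=/usr/bin/gcc;\n'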

    @staticmethod
    def from_sourcing_file(filename, *arguments, **kwargs):
        """Constructs an instance of a
        :py:class:`llnl.util.envmod.EnvironmentModifications` object
        that has the same effect as sourcing a file.

        Args:
            filename (str): the file to be sourced
            *arguments (list): arguments to pass on the command line

        Keyword Args:
            shell (str): the shell to use (default: ``bash``)
            shell_options (str): options passed to the shell (default: ``-c``)
            source_command (str): the command to run (default: ``source``)
            suppress_output (str): redirect used to suppress output of command
                (default: ``&> /dev/null``)
            concatenate_on_success (str): operator used to execute a command
                only when the previous command succeeds (default: ``&&``)
            exclude ([str or re]): ignore any modifications of these
                variables (default: [])
            include ([str or re]): always respect modifications of these
                variables (default: []). Supersedes any excluded variables.
            clean (bool): in addition to removing empty entries,
                also remove duplicate entries (default: False).
        """
        tty.debug("EnvironmentModifications.from_sourcing_file: {0}".format(filename))
        # Check if the file actually exists
        if not os.path.isfile(filename):
            msg = "Trying to source non-existing file: {0}".format(filename)
            raise RuntimeError(msg)

        # Prepare include and exclude lists of environment variable names
        exclude = kwargs.get("exclude", [])
        include = kwargs.get("include", [])
        clean = kwargs.get("clean", False)

        # Other variables unrelated to sourcing a file
        exclude.extend(
            [
                # Bash internals
                "SHLVL",
                "_",
                "PWD",
                "OLDPWD",
                "PS1",
                "PS2",
                "ENV",
                # Environment modules v4
                "LOADEDMODULES",
                "_LMFILES_",
                "BASH_FUNC_module()",
                "MODULEPATH",
                "MODULES_(.*)",
                r"(\w*)_mod(quar|share)",
                # Lmod configuration
                r"LMOD_(.*)",
                "MODULERCFILE",
            ]
        )

        # Compute the environments before and after sourcing
        before = sanitize(
            environment_after_sourcing_files(os.devnull, **kwargs),
            exclude=exclude,
            include=include,
        )
        file_and_args = (filename,) + arguments
        after = sanitize(
            environment_after_sourcing_files(file_and_args, **kwargs),
            exclude=exclude,
            include=include,
        )

        # Delegate to the other factory
        return EnvironmentModifications.from_environment_diff(before, after, clean)

    @staticmethod
    def from_environment_diff(before, after, clean=False):
        """Constructs an instance of a
        :py:class:`llnl.util.envmod.EnvironmentModifications` object
        from the diff of two dictionaries.

        Args:
            before (dict): environment before the modifications are applied
            after (dict): environment after the modifications are applied
            clean (bool): in addition to removing empty entries, also remove
                duplicate entries
        """
        # Fill the EnvironmentModifications instance
        env = EnvironmentModifications()
        # New variables
        new_variables = list(set(after) - set(before))
        # Variables that have been unset
        unset_variables = list(set(before) - set(after))
        # Variables that have been modified
        common_variables = set(before).intersection(set(after))
        modified_variables = [x for x in common_variables if before[x] != after[x]]
        # Consistent output order - looks nicer, easier comparison...
        new_variables.sort()
        unset_variables.sort()
        modified_variables.sort()

        def return_separator_if_any(*args):
            separators = ":", ";"
            for separator in separators:
                for arg in args:
                    if separator in arg:
                        return separator
            return None

        # Add variables to env.
        # Assume that variables with 'PATH' in the name or that contain
        # separators like ':' or ';' are more likely to be paths
        for x in new_variables:
            sep = return_separator_if_any(after[x])
            if sep:
                env.prepend_path(x, after[x], separator=sep)
            elif "PATH" in x:
                env.prepend_path(x, after[x])
            else:
                # We just need to set the variable to the new value
                env.set(x, after[x])

        for x in unset_variables:
            env.unset(x)

        for x in modified_variables:
            value_before = before[x]
            value_after = after[x]
            sep = return_separator_if_any(value_before, value_after)
            if sep:
                before_list = value_before.split(sep)
                after_list = value_after.split(sep)

                # Filter out empty strings
                before_list = list(filter(None, before_list))
                after_list = list(filter(None, after_list))

                # Remove duplicate entries (worse matching, bloats env)
                if clean:
                    before_list = list(llnl.util.lang.dedupe(before_list))
                    after_list = list(llnl.util.lang.dedupe(after_list))
                    # The reassembled cleaned entries
                    value_before = sep.join(before_list)
                    value_after = sep.join(after_list)

                # Paths that have been removed
                remove_list = [ii for ii in before_list if ii not in after_list]
                # Check that nothing has been added in the middle of
                # before_list
                remaining_list = [ii for ii in before_list if ii in after_list]
                try:
                    start = after_list.index(remaining_list[0])
                    end = after_list.index(remaining_list[-1])
                    search = sep.join(after_list[start : end + 1])
                except IndexError:
                    env.prepend_path(x, value_after)
                    continue

                if search not in value_before:
                    # We just need to set the variable to the new value
                    env.prepend_path(x, value_after)
                else:
                    try:
                        prepend_list = after_list[:start]
                        prepend_list.reverse()  # Preserve order after prepend
                    except KeyError:
                        prepend_list = []
                    try:
                        append_list = after_list[end + 1 :]
                    except KeyError:
                        append_list = []

                    for item in remove_list:
                        env.remove_path(x, item)
                    for item in append_list:
                        env.append_path(x, item)
                    for item in prepend_list:
                        env.prepend_path(x, item)
            else:
                # We just need to set the variable to the new value
                env.set(x, value_after)

        return env
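    # Illustrative sketch of the diff heuristics (hypothetical dictionaries):
    # values containing a separator are treated as path lists, everything
    # else as a plain set/unset.
    #
    #   before = {"PATH": "/usr/bin", "STALE": "1"}
    #   after = {"PATH": "/opt/bin:/usr/bin", "NEW": "x"}
    #   mods = EnvironmentModifications.from_environment_diff(before, after)
    #   # -> prepend_path("PATH", "/opt/bin"), set("NEW", "x"), unset("STALE")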

def concatenate_paths(paths, separator=os.pathsep):
    """Concatenates an iterable of paths into a string of paths separated by
    ``separator``, defaulting to ``os.pathsep``.

    Args:
        paths: iterable of paths
        separator: the separator to use; defaults to ';' on Windows and ':' otherwise

    Returns:
        string
    """
    return separator.join(str(item) for item in paths)

def set_or_unset_not_first(variable, changes, errstream):
    """Check if we are going to set or unset something after other
    modifications have already been requested.
    """
    indexes = [
        ii
        for ii, item in enumerate(changes)
        if ii != 0 and not item.args.get("force", False) and type(item) in [SetEnv, UnsetEnv]
    ]
    if indexes:
        good = "\t \t{context} at {filename}:{lineno}"
        nogood = "\t--->\t{context} at {filename}:{lineno}"
        message = "Suspicious requests to set or unset '{var}' found"
        errstream(message.format(var=variable))
        for ii, item in enumerate(changes):
            print_format = nogood if ii in indexes else good
            errstream(print_format.format(**item.args))


def validate(env, errstream):
"""Validates the environment modifications to check for the presence of
|
||||
suspicious patterns. Prompts a warning for everything that was found.
|
||||

    Current checks:
    - set or unset variables after other changes on the same variable

    Args:
        env: list of environment modifications
    """
    if not env.traced:
        return
    modifications = env.group_by_name()
    for variable, list_of_changes in sorted(modifications.items()):
        set_or_unset_not_first(variable, list_of_changes, errstream)


def inspect_path(root, inspections, exclude=None):
    """Inspects ``root`` to search for the subdirectories in ``inspections``.
    Adds every path found to a list of prepend-path commands and returns it.

    Args:
        root (str): absolute path where to search for subdirectories

        inspections (dict): maps relative paths to a list of environment
            variables that will be modified if the path exists. The
            modifications are not performed immediately, but stored in a
            command object that is returned to client

        exclude (typing.Callable): optional callable. If present it must accept an
            absolute path and return True if it should be excluded from the
            inspection

    Examples:

        The following lines execute an inspection in ``/usr`` to search for
        ``/usr/include`` and ``/usr/lib64``. If found we want to prepend
        ``/usr/include`` to ``CPATH`` and ``/usr/lib64`` to ``MY_LIB64_PATH``.

        .. code-block:: python

            # Set up the dictionary containing the inspection
            inspections = {
                'include': ['CPATH'],
                'lib64': ['MY_LIB64_PATH']
            }

            # Get back the list of commands needed to modify the environment
            env = inspect_path('/usr', inspections)

            # Eventually execute the commands
            env.apply_modifications()

    Returns:
        instance of EnvironmentModifications containing the requested
        modifications
    """
    if exclude is None:
        exclude = lambda x: False

    env = EnvironmentModifications()
    # Inspect the prefix to check for the existence of common directories
    for relative_path, variables in inspections.items():
        expected = os.path.join(root, relative_path)

        if os.path.isdir(expected) and not exclude(expected):
            for variable in variables:
                env.prepend_path(variable, expected)

    return env


@contextlib.contextmanager
def preserve_environment(*variables):
    """Ensures that the value of the environment variables passed as
    arguments is the same before entering the context manager and after
    exiting it.

    Variables that are unset before entering the context manager will be
    explicitly unset on exit.

    Args:
        variables (list): list of environment variables to be preserved
    """
    cache = {}
    for var in variables:
        # The environment variable to be preserved might not be there.
        # In that case store None as a placeholder.
        cache[var] = os.environ.get(var, None)

    yield

    for var in variables:
        value = cache[var]
        msg = "[PRESERVE_ENVIRONMENT]"
        if value is not None:
            # Print a debug statement if the value changed
            if var not in os.environ:
                msg += ' {0} was unset, will be reset to "{1}"'
                tty.debug(msg.format(var, value))
            elif os.environ[var] != value:
                msg += ' {0} was set to "{1}", will be reset to "{2}"'
                tty.debug(msg.format(var, os.environ[var], value))
            os.environ[var] = value
        elif var in os.environ:
            msg += ' {0} was set to "{1}", will be unset'
            tty.debug(msg.format(var, os.environ[var]))
            del os.environ[var]
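# Illustrative usage sketch (hypothetical variables): mutate freely inside
# the block; prior values, or their absence, are restored on exit.
#
#   with preserve_environment("PATH", "FOO"):
#       os.environ["PATH"] = "/tmp/bin"
#       os.environ["FOO"] = "transient"
#   # PATH is back to its old value; FOO is unset again if it was unset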

def environment_after_sourcing_files(*files, **kwargs):
    """Returns a dictionary with the environment that one would have
    after sourcing the files passed as argument.

    Args:
        *files: each item can either be a string containing the path
            of the file to be sourced or a sequence, where the first element
            is the file to be sourced and the remaining are arguments to be
            passed to the command line

    Keyword Args:
        env (dict): the initial environment (default: current environment)
        shell (str): the shell to use (default: ``/bin/bash``)
        shell_options (str): options passed to the shell (default: ``-c``)
        source_command (str): the command to run (default: ``source``)
        suppress_output (str): redirect used to suppress output of command
            (default: ``&> /dev/null``)
        concatenate_on_success (str): operator used to execute a command
            only when the previous command succeeds (default: ``&&``)
    """
    # Set the shell executable that will be used to source files
    shell_cmd = kwargs.get("shell", "/bin/bash")
    shell_options = kwargs.get("shell_options", "-c")
    source_command = kwargs.get("source_command", "source")
    suppress_output = kwargs.get("suppress_output", "&> /dev/null")
    concatenate_on_success = kwargs.get("concatenate_on_success", "&&")

    shell = executable.Executable(" ".join([shell_cmd, shell_options]))

    def _source_single_file(file_and_args, environment):
        source_file = [source_command]
        source_file.extend(x for x in file_and_args)
        source_file = " ".join(source_file)

        # If the environment contains 'python' use it, if not
        # go with sys.executable. Below we just need a working
        # Python interpreter, not necessarily sys.executable.
        python_cmd = executable.which("python3", "python", "python2")
        python_cmd = python_cmd.path if python_cmd else sys.executable

        dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
        dump_environment = python_cmd + ' -E -c "{0}"'.format(dump_cmd)

        # Try to source the file
        source_file_arguments = " ".join(
            [
                source_file,
                suppress_output,
                concatenate_on_success,
                dump_environment,
            ]
        )
        output = shell(source_file_arguments, output=str, env=environment, ignore_quotes=True)
        return json.loads(output)

    current_environment = kwargs.get("env", dict(os.environ))
    for f in files:
        # Normalize the input to the helper function
        if isinstance(f, str):
            f = [f]

        current_environment = _source_single_file(f, environment=current_environment)

    return current_environment
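# Illustrative sketch (hypothetical script path): the plain form sources a
# file, the tuple form passes extra command-line arguments along.
#
#   env_a = environment_after_sourcing_files("/opt/tool/setup.sh")
#   env_b = environment_after_sourcing_files(("/opt/tool/setup.sh", "--quiet"))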

def sanitize(environment, exclude, include):
    """Returns a copy of the input dictionary where all the keys that
    match an excluded pattern and don't match an included pattern are
    removed.

    Args:
        environment (dict): input dictionary
        exclude (list): literals or regex patterns to be excluded
        include (list): literals or regex patterns to be included
    """

    def set_intersection(fullset, *args):
        # A set intersection using string literals and regexs
        meta = "[" + re.escape("[$()*?[]^{|}") + "]"
        subset = fullset & set(args)  # As literal
        for name in args:
            if re.search(meta, name):
                pattern = re.compile(name)
                for k in fullset:
                    if re.match(pattern, k):
                        subset.add(k)
        return subset

    # Don't modify input, make a copy instead
    environment = dict(environment)

    # include supersedes any excluded items
    prune = set_intersection(set(environment), *exclude)
    prune -= set_intersection(prune, *include)
    for k in prune:
        environment.pop(k, None)

    return environment
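# Illustrative sketch of the exclude/include semantics (hypothetical dict):
# literals match exactly, entries with regex metacharacters are treated as
# patterns, and include wins over exclude.
#
#   env = {"LMOD_CMD": "lmod", "LMOD_DIR": "/sw", "PATH": "/usr/bin"}
#   sanitize(env, exclude=[r"LMOD_(.*)"], include=["LMOD_CMD"])
#   # -> {"LMOD_CMD": "lmod", "PATH": "/usr/bin"}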
@@ -8,14 +8,10 @@
import shlex
import subprocess
import sys

from six import string_types, text_type
from six.moves import shlex_quote
from typing import List, Optional, Union

import llnl.util.tty as tty

import spack.error
from spack.util.path import Path, format_os_path, path_to_os_path, system_path_filter
from llnl.util.path import Path, format_os_path, path_to_os_path, system_path_filter

__all__ = ["Executable", "which", "ProcessError"]

@@ -30,13 +26,20 @@ def __init__(self, name):
        # filter back to platform dependent path
        self.exe = path_to_os_path(*self.exe)
        self.default_env = {}
        from spack.util.environment import EnvironmentModifications  # no cycle

        self.default_envmod = EnvironmentModifications()
        self._default_envmod = None
        self.returncode = None

        if not self.exe:
            raise ProcessError("Cannot construct executable for '%s'" % name)
            raise ProcessError(f"Cannot construct executable for '{name}'")

    @property
    def default_envmod(self):
        from llnl.util.envmod import EnvironmentModifications

        if self._default_envmod is None:
            self._default_envmod = EnvironmentModifications()

        return self._default_envmod

    @system_path_filter
    def add_default_arg(self, arg):
@@ -125,6 +128,8 @@ def __call__(self, *args, **kwargs):
        By default, the subprocess inherits the parent's file descriptors.

        """
        from llnl.util.envmod import EnvironmentModifications

        # Environment
        env_arg = kwargs.get("env", None)

@@ -133,8 +138,6 @@ def __call__(self, *args, **kwargs):
            self.default_envmod.apply_modifications(env)
            env.update(self.default_env)

        from spack.util.environment import EnvironmentModifications  # no cycle

        # Apply env argument
        if isinstance(env_arg, EnvironmentModifications):
            env_arg.apply_modifications(env)
@@ -168,7 +171,7 @@ def __call__(self, *args, **kwargs):
            raise ValueError("Cannot use `str` as input stream.")

        def streamify(arg, mode):
            if isinstance(arg, string_types):
            if isinstance(arg, str):
                return open(arg, mode), True
            elif arg in (str, str.split):
                return subprocess.PIPE, False
@@ -213,44 +216,43 @@ def streamify(arg, mode):
            result = ""
            if output in (str, str.split):
                if sys.platform == "win32":
                    outstr = text_type(out.decode("ISO-8859-1"))
                    outstr = str(out.decode("ISO-8859-1"))
                else:
                    outstr = text_type(out.decode("utf-8"))
                    outstr = str(out.decode("utf-8"))
                result += outstr
                if output is str.split:
                    sys.stdout.write(outstr)
            if error in (str, str.split):
                if sys.platform == "win32":
                    errstr = text_type(err.decode("ISO-8859-1"))
                    errstr = str(err.decode("ISO-8859-1"))
                else:
                    errstr = text_type(err.decode("utf-8"))
                    errstr = str(err.decode("utf-8"))
                result += errstr
                if error is str.split:
                    sys.stderr.write(errstr)

            rc = self.returncode = proc.returncode
            if fail_on_error and rc != 0 and (rc not in ignore_errors):
                long_msg = cmd_line_string
                msg = f"Command {cmd_line_string} exited with status {proc.returncode}."
                if result:
                    # If the output is not captured in the result, it will have
                    # been stored either in the specified files (e.g. if
                    # 'output' specifies a file) or written to the parent's
                    # stdout/stderr (e.g. if 'output' is not specified)
                    long_msg += "\n" + result
                    msg += "\n" + result

                raise ProcessError("Command exited with status %d:" % proc.returncode, long_msg)
                raise ProcessError(msg)

            return result

        except OSError as e:
            raise ProcessError("%s: %s" % (self.exe[0], e.strerror), "Command: " + cmd_line_string)
            raise ProcessError(f"{self.exe[0]}: {e.strerror}\n Command: '{cmd_line_string}'")

        except subprocess.CalledProcessError as e:
            if fail_on_error:
                raise ProcessError(
                    str(e),
                    "\nExit status %d when invoking command: %s"
                    % (proc.returncode, cmd_line_string),
                    f"{str(e)}\n"
                    f"    Exit status {proc.returncode} when invoking command: '{cmd_line_string}'"
                )

        finally:
@@ -278,16 +280,31 @@ def __str__(self):


@system_path_filter
def which_string(*args, **kwargs):
    """Like ``which()``, but return a string instead of an ``Executable``."""
    path = kwargs.get("path", os.environ.get("PATH", ""))
    required = kwargs.get("required", False)
def which_string(*args: str, path: Optional[Union[List[str], str]] = None, required: bool = False):
    """Finds an executable in the path like command-line which.

    if isinstance(path, string_types):
    If given multiple executables, returns the first one that is found.
    If no executables are found, returns None.

    Parameters:
        *args: One or more executables to search for

    Keyword Arguments:
        path: colon-separated (semicolon-separated on Windows) string or list of
            paths to search. Defaults to ``os.environ["PATH"]``
        required: If set to ``True``, raise an error if executable not found

    Returns:
        Absolute path of the first executable found.
    """
    if path is None:
        path = os.environ.get("PATH") or ""

    if isinstance(path, str):
        path = path.split(os.pathsep)

    for name in args:
        win_candidates = []
        win_candidates: List[str] = []
        if sys.platform == "win32" and (not name.endswith(".exe") and not name.endswith(".bat")):
            win_candidates = [name + ext for ext in [".exe", ".bat"]]
        candidate_names = [name] if not win_candidates else win_candidates
@@ -312,19 +329,19 @@ def which_string(*args, **kwargs):
            return exe

    if required:
        raise CommandNotFoundError("spack requires '%s'. Make sure it is in your path." % args[0])
        raise CommandNotFoundError(args[0])

    return None


def which(*args, **kwargs):
def which(*args: str, path: Optional[Union[List[str], str]] = None, required: bool = False):
    """Finds an executable in the path like command-line which.

    If given multiple executables, returns the first one that is found.
    If no executables are found, returns None.

    Parameters:
        *args (str): One or more executables to search for
        *args: One or more executables to search for

    Keyword Arguments:
        path (list or str): The path to search. Defaults to ``PATH``
@@ -333,13 +350,17 @@ def which(*args, **kwargs):
    Returns:
        Executable: The first executable that is found in the path
    """
    exe = which_string(*args, **kwargs)
    return Executable(shlex_quote(exe)) if exe else None
    exe = which_string(*args, path=path, required=required)
    return Executable(shlex.quote(exe)) if exe else None


class ProcessError(spack.error.SpackError):
class ProcessError(Exception):
    """ProcessErrors are raised when Executables exit with an error code."""


class CommandNotFoundError(spack.error.SpackError):
class CommandNotFoundError(Exception):
    """Raised when ``which()`` can't find a required executable."""

    def __init__(self, command: str):
        super().__init__(f"Couldn't find command '{command}'. Make sure it is in your path.")
        self.command = command
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import errno
import glob
import hashlib
@@ -17,16 +18,12 @@
from contextlib import contextmanager
from sys import platform as _platform

import six

from llnl.util import tty
from llnl.util.compat import Sequence
from llnl.util.executable import CommandNotFoundError, Executable, which
from llnl.util.lang import dedupe, memoized
from llnl.util.path import path_to_os_path, system_path_filter
from llnl.util.symlink import islink, symlink

from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.path import path_to_os_path, system_path_filter

is_windows = _platform == "win32"

if not is_windows:
@@ -101,7 +98,9 @@ def getuid():
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
    if is_windows:
        if os.path.exists(dst):
        # Windows path existence checks will sometimes fail on junctions/links/symlinks
        # so check for that case
        if os.path.exists(dst) or os.path.islink(dst):
            os.remove(dst)
    os.rename(src, dst)

@@ -290,9 +289,10 @@ def groupid_to_group(x):
    shutil.copy(filename, tmp_filename)

    try:
        extra_kwargs = {}
        if sys.version_info > (3, 0):
            extra_kwargs = {"errors": "surrogateescape"}
        # To avoid translating line endings (\n to \r\n and vice versa)
        # we force os.open to ignore translations and use the line endings
        # the file comes with
        extra_kwargs = {"errors": "surrogateescape", "newline": ""}

        # Open as a text file and filter until the end of the file is
        # reached or we found a marker in the line if it was specified
@@ -522,7 +522,7 @@ def chgrp(path, group, follow_symlinks=True):
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")

    if isinstance(group, six.string_types):
    if isinstance(group, str):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
@@ -1000,45 +1000,16 @@ def hash_directory(directory, ignore=[]):
    return md5_hash.hexdigest()


def _try_unlink(path):
    try:
        os.unlink(path)
    except (IOError, OSError):
        # But if that fails, that's OK.
        pass


@contextmanager
@system_path_filter
def write_tmp_and_move(path, mode="w"):
    """Write to a temporary file in the same directory, then move into place."""
    # Rely on NamedTemporaryFile to give a unique file without races
    # in the directory of the target file.
    file = tempfile.NamedTemporaryFile(
        prefix="." + os.path.basename(path),
        suffix=".tmp",
        dir=os.path.dirname(path),
        mode=mode,
        delete=False,  # we delete it ourselves
    )
    tmp_path = file.name

    try:
        yield file
    except BaseException:
        # On any failure, try to remove the temporary file.
        _try_unlink(tmp_path)
        raise
    finally:
        # Always close the file descriptor
        file.close()

    # Atomically move into existence.
    try:
        os.rename(tmp_path, path)
    except (IOError, OSError):
        _try_unlink(tmp_path)
        raise
def write_tmp_and_move(filename):
    """Write to a temporary file, then move into place."""
    dirname = os.path.dirname(filename)
    basename = os.path.basename(filename)
    tmp = os.path.join(dirname, ".%s.tmp" % basename)
    with open(tmp, "w") as f:
        yield f
    shutil.move(tmp, filename)


@contextmanager
@@ -1048,7 +1019,7 @@ def open_if_filename(str_or_file, mode="r"):

    If it's a file object, just yields the file object.
    """
    if isinstance(str_or_file, six.string_types):
    if isinstance(str_or_file, str):
        with open(str_or_file, mode) as f:
            yield f
    else:
@@ -1338,46 +1309,34 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
        depth (str): current depth from the root
    """
    dir = os.path.join(root, rel_path)

    if sys.version_info >= (3, 5, 0):
        dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)  # novermin
    else:
        dir_entries = os.listdir(dir)
        dir_entries.sort()
    dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)

    for f in dir_entries:
        if sys.version_info >= (3, 5, 0):
            rel_child = os.path.join(rel_path, f.name)
            islink = f.is_symlink()
            # On Windows, symlinks to directories are distinct from
            # symlinks to files, and it is possible to create a
            # broken symlink to a directory (e.g. using os.symlink
            # without `target_is_directory=True`), invoking `isdir`
            # on a symlink on Windows that is broken in this manner
            # will result in an error. In this case we can work around
            # the issue by reading the target and resolving the
            # directory ourselves
            try:
                isdir = f.is_dir()
            except OSError as e:
                if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
                    # if path is a symlink, determine destination and
                    # evaluate file vs directory
                    link_target = resolve_link_target_relative_to_the_link(f)
                    # link_target might be relative but
                    # resolve_link_target_relative_to_the_link
                    # will ensure that if so, that it is relative
                    # to the CWD and therefore
                    # makes sense
                    isdir = os.path.isdir(link_target)
                else:
                    raise e

        else:
            rel_child = os.path.join(rel_path, f)
            lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
            if not lexists:
                continue
        rel_child = os.path.join(rel_path, f.name)
        islink = f.is_symlink()
        # On Windows, symlinks to directories are distinct from
        # symlinks to files, and it is possible to create a
        # broken symlink to a directory (e.g. using os.symlink
        # without `target_is_directory=True`), invoking `isdir`
        # on a symlink on Windows that is broken in this manner
        # will result in an error. In this case we can work around
        # the issue by reading the target and resolving the
        # directory ourselves
        try:
            isdir = f.is_dir()
        except OSError as e:
            if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
                # if path is a symlink, determine destination and
                # evaluate file vs directory
                link_target = resolve_link_target_relative_to_the_link(f)
                # link_target might be relative but
                # resolve_link_target_relative_to_the_link
                # will ensure that if so, that it is relative
                # to the CWD and therefore
                # makes sense
                isdir = os.path.isdir(link_target)
            else:
                raise e

        if not isdir and not islink:
            # handle non-symlink files
@@ -1638,14 +1597,14 @@ def find(root, files, recursive=True):

    Parameters:
        root (str): The root directory to start searching from
        files (str or Sequence): Library name(s) to search for
        files (str or collections.abc.Sequence): Library name(s) to search for
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to True.

    Returns:
        list: The files that have been found
    """
    if isinstance(files, six.string_types):
    if isinstance(files, str):
        files = [files]

    if recursive:
@@ -1702,14 +1661,14 @@ def _find_non_recursive(root, search_files):
# Utilities for libraries and headers


class FileList(Sequence):
class FileList(collections.abc.Sequence):
    """Sequence of absolute paths to files.

    Provides a few convenience methods to manipulate file paths.
    """

    def __init__(self, files):
        if isinstance(files, six.string_types):
        if isinstance(files, str):
            files = [files]

        self.files = list(dedupe(files))
@@ -1805,7 +1764,7 @@ def directories(self):
    def directories(self, value):
        value = value or []
        # Accept a single directory as input
        if isinstance(value, six.string_types):
        if isinstance(value, str):
            value = [value]

        self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
@@ -1941,9 +1900,9 @@ def find_headers(headers, root, recursive=False):
    Returns:
        HeaderList: The headers that have been found
    """
    if isinstance(headers, six.string_types):
    if isinstance(headers, str):
        headers = [headers]
    elif not isinstance(headers, Sequence):
    elif not isinstance(headers, collections.abc.Sequence):
        message = "{0} expects a string or sequence of strings as the "
        message += "first argument [got {1} instead]"
        message = message.format(find_headers.__name__, type(headers))
@@ -2107,9 +2066,9 @@ def find_system_libraries(libraries, shared=True):
    Returns:
        LibraryList: The libraries that have been found
    """
    if isinstance(libraries, six.string_types):
    if isinstance(libraries, str):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
    elif not isinstance(libraries, collections.abc.Sequence):
        message = "{0} expects a string or sequence of strings as the "
        message += "first argument [got {1} instead]"
        message = message.format(find_system_libraries.__name__, type(libraries))
@@ -2164,9 +2123,9 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
    Returns:
        LibraryList: The libraries that have been found
    """
    if isinstance(libraries, six.string_types):
    if isinstance(libraries, str):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
    elif not isinstance(libraries, collections.abc.Sequence):
        message = "{0} expects a string or sequence of strings as the "
        message += "first argument [got {1} instead]"
        message = message.format(find_libraries.__name__, type(libraries))
@@ -2323,10 +2282,17 @@ def add_rpath(self, *paths):
        """
        self._addl_rpaths = self._addl_rpaths | set(paths)

    def _link(self, path, dest):
    def _link(self, path, dest_dir):
"""Perform link step of simulated rpathing, installing
|
||||
simlinks of file in path to the dest_dir
|
||||
location. This method deliberately prevents
|
||||
the case where a path points to a file inside the dest_dir.
|
||||
This is because it is both meaningless from an rpath
|
||||
perspective, and will cause an error when Developer
|
||||
mode is not enabled"""
|
||||
        file_name = os.path.basename(path)
        dest_file = os.path.join(dest, file_name)
        if os.path.exists(dest):
        dest_file = os.path.join(dest_dir, file_name)
        if os.path.exists(dest_dir) and not dest_file == path:
            try:
                symlink(path, dest_file)
                # For py2 compatibility, we have to catch the specific Windows error code
@@ -2340,7 +2306,7 @@ def _link(self, path, dest):
                    "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
                    if already_linked
                    else "library with name %s already exists at location %s."
                    % (file_name, dest)
                    % (file_name, dest_dir)
                )
                pass
            else:

@@ -5,9 +5,11 @@
|
||||
|
||||
from __future__ import division
|
||||
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import functools
|
||||
import inspect
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@@ -15,11 +17,6 @@
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Callable, Iterable, List, Tuple
|
||||
|
||||
import six
|
||||
from six import string_types
|
||||
|
||||
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = [r"^\.#", "~$"]
|
||||
|
||||
@@ -200,14 +197,9 @@ def _memoized_function(*args, **kwargs):
|
||||
return ret
|
||||
except TypeError as e:
|
||||
# TypeError is raised when indexing into a dict if the key is unhashable.
|
||||
raise six.raise_from(
|
||||
UnhashableArguments(
|
||||
"args + kwargs '{}' was not hashable for function '{}'".format(
|
||||
key, func.__name__
|
||||
),
|
||||
),
|
||||
e,
|
||||
)
|
||||
raise UnhashableArguments(
|
||||
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
|
||||
) from e
|
||||
|
||||
return _memoized_function
|
||||
|
||||
@@ -312,7 +304,7 @@ def lazy_eq(lseq, rseq):
# zip_longest is implemented in native code, so use it for speed.
# use zip_longest instead of zip because it allows us to tell
# which iterator was longer.
for left, right in zip_longest(liter, riter, fillvalue=done):
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return False

@@ -332,7 +324,7 @@ def lazy_lt(lseq, rseq):
liter = lseq()
riter = rseq()

for left, right in zip_longest(liter, riter, fillvalue=done):
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
if (left is done) or (right is done):
return left is done  # left was shorter than right

@@ -482,7 +474,7 @@ def add_func_to_class(name, func):


@lazy_lexicographic_ordering
class HashableMap(MutableMapping):
class HashableMap(collections.abc.MutableMapping):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""

@@ -574,7 +566,7 @@ def match_predicate(*args):

def match(string):
for arg in args:
if isinstance(arg, string_types):
if isinstance(arg, str):
if re.search(arg, string):
return True
elif isinstance(arg, list) or isinstance(arg, tuple):
@@ -749,6 +741,18 @@ def _n_xxx_ago(x):
raise ValueError(msg)


def pretty_seconds_formatter(seconds):
if seconds >= 1:
multiplier, unit = 1, "s"
elif seconds >= 1e-3:
multiplier, unit = 1e3, "ms"
elif seconds >= 1e-6:
multiplier, unit = 1e6, "us"
else:
multiplier, unit = 1e9, "ns"
return lambda s: "%.3f%s" % (multiplier * s, unit)


def pretty_seconds(seconds):
"""Seconds to string with appropriate units

@@ -758,15 +762,7 @@ def pretty_seconds(seconds):
Returns:
str: Time string with units
"""
if seconds >= 1:
value, unit = seconds, "s"
elif seconds >= 1e-3:
value, unit = seconds * 1e3, "ms"
elif seconds >= 1e-6:
value, unit = seconds * 1e6, "us"
else:
value, unit = seconds * 1e9, "ns"
return "%.3f%s" % (value, unit)
return pretty_seconds_formatter(seconds)(seconds)
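A quick sketch of how the refactored helpers compose (illustrative only; assumes the two functions as defined in the hunk above):

    from llnl.util.lang import pretty_seconds, pretty_seconds_formatter  # import path assumed

    fmt = pretty_seconds_formatter(0.002)   # unit ("ms") and multiplier chosen once
    assert fmt(0.002) == "2.000ms"
    assert pretty_seconds(90) == "90.000s"

The factory exists so a caller can pick the unit from one representative value and then format a whole series of timings in that same unit.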


class RequiredAttributeError(ValueError):
@@ -887,32 +883,28 @@ def load_module_from_file(module_name, module_path):
ImportError: when the module can't be loaded
FileNotFoundError: when module_path doesn't exist
"""
import importlib.util

if module_name in sys.modules:
return sys.modules[module_name]

# This recipe is adapted from https://stackoverflow.com/a/67692/771663
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util

spec = importlib.util.spec_from_file_location(module_name, module_path)  # novm
module = importlib.util.module_from_spec(spec)  # novm
# The module object needs to exist in sys.modules before the
# loader executes the module code.
#
# See https://docs.python.org/3/reference/import.html#loading
sys.modules[spec.name] = module
spec = importlib.util.spec_from_file_location(module_name, module_path)
module = importlib.util.module_from_spec(spec)
# The module object needs to exist in sys.modules before the
# loader executes the module code.
#
# See https://docs.python.org/3/reference/import.html#loading
sys.modules[spec.name] = module
try:
spec.loader.exec_module(module)
except BaseException:
try:
spec.loader.exec_module(module)
except BaseException:
try:
del sys.modules[spec.name]
except KeyError:
pass
raise
elif sys.version_info[0] == 2:
import imp

module = imp.load_source(module_name, module_path)
del sys.modules[spec.name]
except KeyError:
pass
raise
return module
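With the Python 2 branch gone, the whole function reduces to the importlib recipe; a minimal runnable sketch (module name and path hypothetical):

    import importlib.util
    import sys

    def load_from_file(module_name, module_path):
        if module_name in sys.modules:
            return sys.modules[module_name]
        spec = importlib.util.spec_from_file_location(module_name, module_path)
        module = importlib.util.module_from_spec(spec)
        sys.modules[spec.name] = module   # must be registered before exec_module runs
        try:
            spec.loader.exec_module(module)
        except BaseException:
            sys.modules.pop(spec.name, None)  # undo the registration on failure
            raise
        return module

    mod = load_from_file("my_mod", "/tmp/my_mod.py")  # hypothetical path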
@@ -998,10 +990,9 @@ def enum(**kwargs):


def stable_partition(
input_iterable, # type: Iterable
predicate_fn, # type: Callable[[Any], bool]
):
# type: (...) -> Tuple[List[Any], List[Any]]
input_iterable: Iterable,
predicate_fn: Callable[[Any], bool],
) -> Tuple[List[Any], List[Any]]:
"""Partition the input iterable according to a custom predicate.

Args:
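The comment-style type hints above become real annotations; behavior is unchanged. A minimal sketch of what the function does (body assumed from the docstring, not shown in this hunk):

    from typing import Any, Callable, Iterable, List, Tuple

    def stable_partition(
        input_iterable: Iterable,
        predicate_fn: Callable[[Any], bool],
    ) -> Tuple[List[Any], List[Any]]:
        matching: List[Any] = []
        rest: List[Any] = []
        for item in input_iterable:   # one pass, original order preserved
            (matching if predicate_fn(item) else rest).append(item)
        return matching, rest

    evens, odds = stable_partition(range(6), lambda n: n % 2 == 0)  # ([0, 2, 4], [1, 3, 5])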
@@ -1030,7 +1021,7 @@ def ensure_last(lst, *elements):
lst.append(lst.pop(lst.index(elt)))


class TypedMutableSequence(MutableSequence):
class TypedMutableSequence(collections.abc.MutableSequence):
"""Base class that behaves like a list, just with a different type.

Client code can inherit from this base class:
@@ -1073,23 +1064,20 @@ class GroupedExceptionHandler(object):
"""A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

def __init__(self):
self.exceptions = [] # type: List[Tuple[str, Exception, List[str]]]
self.exceptions: List[Tuple[str, Exception, List[str]]] = []

def __bool__(self):
"""Whether any exceptions were handled."""
return bool(self.exceptions)

def forward(self, context):
# type: (str) -> GroupedExceptionForwarder
def forward(self, context: str) -> "GroupedExceptionForwarder":
"""Return a contextmanager which extracts tracebacks and prefixes a message."""
return GroupedExceptionForwarder(context, self)

def _receive_forwarded(self, context, exc, tb):
# type: (str, Exception, List[str]) -> None
def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
self.exceptions.append((context, exc, tb))

def grouped_message(self, with_tracebacks=True):
# type: (bool) -> str
def grouped_message(self, with_tracebacks: bool = True) -> str:
"""Print out an error message coalescing all the forwarded errors."""
each_exception_message = [
"{0} raised {1}: {2}{3}".format(
@@ -1107,8 +1095,7 @@ class GroupedExceptionForwarder(object):
"""A contextmanager to capture exceptions and forward them to a
GroupedExceptionHandler."""

def __init__(self, context, handler):
# type: (str, GroupedExceptionHandler) -> None
def __init__(self, context: str, handler: GroupedExceptionHandler):
self._context = context
self._handler = handler
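Typical use of the handler/forwarder pair after this change, as a sketch (task names and workload hypothetical):

    handler = GroupedExceptionHandler()
    for name in ("fetch", "install"):
        with handler.forward(name):      # captures any exception plus its traceback
            do_work(name)                # hypothetical workload
    if handler:                          # truthy once something was forwarded
        raise RuntimeError(handler.grouped_message(with_tracebacks=True))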
@@ -9,13 +9,11 @@
import sys
import time
from datetime import datetime
from typing import Dict, Tuple # novm

import llnl.util.string
import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds

import spack.util.string

if sys.platform != "win32":
import fcntl

@@ -81,7 +79,7 @@ class OpenFileTracker(object):

def __init__(self):
"""Create a new ``OpenFileTracker``."""
self._descriptors = {} # type: Dict[Tuple[int, int], OpenFile]
self._descriptors = {}

def get_fh(self, path):
"""Get a filehandle for a lockfile.
@@ -103,7 +101,7 @@ def get_fh(self, path):
try:
# see whether we've seen this inode/pid before
stat = os.stat(path)
key = (stat.st_ino, pid)
key = (stat.st_dev, stat.st_ino, pid)
open_file = self._descriptors.get(key)

except OSError as e:
@@ -129,32 +127,32 @@ def get_fh(self, path):

# if we just created the file, we'll need to get its inode here
if not stat:
inode = os.fstat(fd).st_ino
key = (inode, pid)
stat = os.fstat(fd)
key = (stat.st_dev, stat.st_ino, pid)

self._descriptors[key] = open_file

open_file.refs += 1
return open_file.fh

def release_fh(self, path):
"""Release a filehandle, only closing it if there are no more references."""
try:
inode = os.stat(path).st_ino
except OSError as e:
if e.errno != errno.ENOENT: # only handle file not found
raise
inode = None # this will not be in self._descriptors

key = (inode, os.getpid())
def release_by_stat(self, stat):
key = (stat.st_dev, stat.st_ino, os.getpid())
open_file = self._descriptors.get(key)
assert open_file, "Attempted to close non-existing lock path: %s" % path
assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode

open_file.refs -= 1
if not open_file.refs:
del self._descriptors[key]
open_file.fh.close()

def release_by_fh(self, fh):
self.release_by_stat(os.fstat(fh.fileno()))

def purge(self):
for key in list(self._descriptors.keys()):
self._descriptors[key].fh.close()
del self._descriptors[key]
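The descriptor key grows from (inode, pid) to (device, inode, pid), so two lockfiles on different filesystems that happen to share an inode number no longer alias each other. Recovering the key from a path or from an open handle looks roughly like:

    import os

    st = os.stat("/path/to/lockfile")            # hypothetical lock path
    key = (st.st_dev, st.st_ino, os.getpid())
    # release_by_fh() derives the same key via os.fstat(fh.fileno())

Note that the new assertion message formats ``stat.st_inode``; ``os.stat_result`` only defines ``st_ino``, so that message would itself raise if the assert ever fired.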
#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the same file many times for different byte range locks
@@ -166,7 +164,7 @@ def _attempts_str(wait_time, nattempts):
if nattempts <= 1:
return ""

attempts = spack.util.string.plural(nattempts, "attempt")
attempts = llnl.util.string.plural(nattempts, "attempt")
return " after {} and {}".format(pretty_seconds(wait_time), attempts)


@@ -432,8 +430,7 @@ def _unlock(self):

"""
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)

file_tracker.release_fh(self.path)
file_tracker.release_by_fh(self._file)
self._file = None
self._reads = 0
self._writes = 0

lib/spack/llnl/util/path.py (new file, 159 lines)
@@ -0,0 +1,159 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utilities for managing Linux and Windows paths."""

# TODO: look at using pathlib since we now only support Python 3

import os
import re
import sys
from urllib.parse import urlparse

is_windows = sys.platform == "win32"


def is_path_url(path):
if "\\" in path:
return False
url_tuple = urlparse(path)
return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1


def win_exe_ext():
return ".exe"


def path_to_os_path(*pths):
"""
Takes an arbitrary number of positional parameters,
converts each argument of type string to use a normalized
filepath separator, and returns a list of all values
"""
ret_pths = []
for pth in pths:
if isinstance(pth, str) and not is_path_url(pth):
pth = convert_to_platform_path(pth)
ret_pths.append(pth)
return ret_pths


def sanitize_file_path(pth):
"""
Formats strings to contain only characters that can
be used to generate legal file paths.

Criteria for legal files based on
https://en.wikipedia.org/wiki/Filename#Comparison_of_filename_limitations

Args:
pth: string containing path to be created
on the host filesystem

Return:
sanitized string that can legally be made into a path
"""
# on unix, splitting path by separators will remove
# instances of illegal characters on join
pth_cmpnts = pth.split(os.path.sep)

if is_windows:
drive_match = r"[a-zA-Z]:"
is_abs = bool(re.match(drive_match, pth_cmpnts[0]))
drive = pth_cmpnts[0] + os.path.sep if is_abs else ""
pth_cmpnts = pth_cmpnts[1:] if drive else pth_cmpnts
illegal_chars = r'[<>?:"|*\\]'
else:
drive = "/" if not pth_cmpnts[0] else ""
illegal_chars = r"[/]"

pth = []
for cmp in pth_cmpnts:
san_cmp = re.sub(illegal_chars, "", cmp)
pth.append(san_cmp)
return drive + os.path.join(*pth)
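Illustrative behavior, assuming the module lands at ``llnl.util.path`` as this diff creates it:

    from llnl.util.path import sanitize_file_path

    # POSIX: only the separator is illegal inside a component; the root is kept.
    assert sanitize_file_path("/tmp/spack stage") == "/tmp/spack stage"
    # Windows: characters <>?:"|*\ are stripped per component, drive kept, e.g.
    #   sanitize_file_path(r'C:\spack\ins*tall') -> r'C:\spack\install'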


def system_path_filter(_func=None, arg_slice=None):
"""
Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments

This decorator takes all (or a slice) of a method's positional arguments
and normalizes usage of filepath separators on a per platform basis.

Note: **kwargs, urls, and any type that is not a string are ignored
so in such cases where path normalization is required, that should be
handled by calling path_to_os_path directly as needed.

Parameters:
arg_slice (slice): a slice object specifying the slice of arguments
in the decorated method over which filepath separators are
normalized
"""
from functools import wraps

def holder_func(func):
@wraps(func)
def path_filter_caller(*args, **kwargs):
args = list(args)
if arg_slice:
args[arg_slice] = path_to_os_path(*args[arg_slice])
else:
args = path_to_os_path(*args)
return func(*args, **kwargs)

return path_filter_caller

if _func:
return holder_func(_func)
return holder_func
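Usage sketch of the decorator (function names hypothetical):

    @system_path_filter
    def install_file(src, dest):
        ...  # both arguments arrive with platform-appropriate separators

    @system_path_filter(arg_slice=slice(1, 2))
    def tag_path(label, path):
        ...  # only `path` is normalized; `label` is passed through untouched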


class Path:
"""
Describes the filepath separator types
in an enum style
with a helper attribute
exposing the path type of
the current platform.
"""

unix = 0
windows = 1
platform_path = windows if is_windows else unix


def format_os_path(path, mode=Path.unix):
"""
Format path to use consistent, platform specific
separators. Absolute paths are converted between
drive letters and a prepended '/' as per platform
requirement.

Parameters:
path (str): the path to be normalized, must be a string
or expose the replace method.
mode (Path): the path file separator style to normalize the
passed path to. Default is unix style, i.e. '/'
"""
if not path:
return path
if mode == Path.windows:
path = path.replace("/", "\\")
else:
path = path.replace("\\", "/")
return path


def convert_to_posix_path(path):
return format_os_path(path, mode=Path.unix)


def convert_to_windows_path(path):
return format_os_path(path, mode=Path.windows)


def convert_to_platform_path(path):
return format_os_path(path, mode=Path.platform_path)
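The three converters are thin wrappers over ``format_os_path``, which only swaps separator characters:

    assert convert_to_posix_path("C:\\spack\\bin") == "C:/spack/bin"
    assert convert_to_windows_path("C:/spack/bin") == "C:\\spack\\bin"
    # convert_to_platform_path() picks one of the two based on the host platform.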
@@ -23,8 +23,11 @@ def symlink(real_path, link_path):

On Windows, use junctions if os.symlink fails.
"""
if not is_windows or _win32_can_symlink():
if not is_windows:
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
else:
try:
# Try to use junctions
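Net effect of the restructured branches, as a sketch (the junction fallback is elided here, as in the hunk):

    import os, sys

    def symlink_sketch(real_path, link_path):
        if sys.platform != "win32":
            os.symlink(real_path, link_path)
        else:
            # Windows needs target_is_directory=True for directory targets
            os.symlink(real_path, link_path,
                       target_is_directory=os.path.isdir(real_path))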
@@ -6,6 +6,7 @@
from __future__ import unicode_literals

import contextlib
import io
import os
import struct
import sys
@@ -14,10 +15,6 @@
from datetime import datetime
from sys import platform as _platform

import six
from six import StringIO
from six.moves import input

if _platform != "win32":
import fcntl
import termios
@@ -183,7 +180,7 @@ def msg(message, *args, **kwargs):
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(six.text_type(arg)))
print(indent + _output_filter(str(arg)))


def info(message, *args, **kwargs):
@@ -201,13 +198,13 @@ def info(message, *args, **kwargs):
st_text = process_stacktrace(st_countback)
cprint(
"@%s{%s==>} %s%s"
% (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
% (format, st_text, get_timestamp(), cescape(_output_filter(str(message)))),
stream=stream,
)
for arg in args:
if wrap:
lines = textwrap.wrap(
_output_filter(six.text_type(arg)),
_output_filter(str(arg)),
initial_indent=indent,
subsequent_indent=indent,
break_long_words=break_long_words,
@@ -215,7 +212,7 @@ def info(message, *args, **kwargs):
for line in lines:
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
stream.write(indent + _output_filter(str(arg)) + "\n")


def verbose(message, *args, **kwargs):
@@ -238,7 +235,7 @@ def error(message, *args, **kwargs):

kwargs.setdefault("format", "*r")
kwargs.setdefault("stream", sys.stderr)
info("Error: " + six.text_type(message), *args, **kwargs)
info("Error: " + str(message), *args, **kwargs)


def warn(message, *args, **kwargs):
@@ -247,7 +244,7 @@ def warn(message, *args, **kwargs):

kwargs.setdefault("format", "*Y")
kwargs.setdefault("stream", sys.stderr)
info("Warning: " + six.text_type(message), *args, **kwargs)
info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs):
@@ -271,7 +268,7 @@ def get_number(prompt, **kwargs):
while number is None:
msg(prompt, newline=False)
ans = input()
if ans == six.text_type(abort):
if ans == str(abort):
return None

if ans:
@@ -336,11 +333,11 @@ def hline(label=None, **kwargs):
cols -= 2
cols = min(max_width, cols)

label = six.text_type(label)
label = str(label)
prefix = char * 2 + " "
suffix = " " + (cols - len(prefix) - clen(label)) * char

out = StringIO()
out = io.StringIO()
out.write(prefix)
out.write(label)
out.write(suffix)
@@ -372,10 +369,5 @@ def ioctl_gwinsz(fd):

return int(rc[0]), int(rc[1])
else:
if sys.version_info[0] < 3:
raise RuntimeError(
"Terminal size not obtainable on Windows with a\
Python version older than 3"
)
rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
return int(rc[0]), int(rc[1])

@@ -8,11 +8,10 @@
"""
from __future__ import division, unicode_literals

import io
import os
import sys

from six import StringIO, text_type

from llnl.util.tty import terminal_size
from llnl.util.tty.color import cextra, clen

@@ -134,7 +133,7 @@ def colify(elts, **options):
)

# elts needs to be an array of strings so we can count the elements
elts = [text_type(elt) for elt in elts]
elts = [str(elt) for elt in elts]
if not elts:
return (0, ())

@@ -232,7 +231,7 @@ def transpose():
def colified(elts, **options):
"""Invokes the ``colify()`` function but returns the result as a string
instead of writing it to an output string."""
sio = StringIO()
sio = io.StringIO()
options["output"] = sio
colify(elts, **options)
return sio.getvalue()

@@ -65,8 +65,6 @@
import sys
from contextlib import contextmanager

import six


class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
@@ -259,7 +257,7 @@ def cescape(string):
Returns:
(str): the string with color codes escaped
"""
string = six.text_type(string)
string = str(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string

@@ -21,14 +21,12 @@
import traceback
from contextlib import contextmanager
from threading import Thread
from types import ModuleType # novm
from typing import Optional # novm

from six import StringIO, string_types
from types import ModuleType
from typing import Optional

import llnl.util.tty as tty

termios = None # type: Optional[ModuleType]
termios: Optional[ModuleType] = None
try:
import termios as term_mod

@@ -241,8 +239,7 @@ def __exit__(self, exc_type, exception, traceback):
"""If termios was available, restore old settings."""
if self.old_cfg:
self._restore_default_terminal_settings()
if sys.version_info >= (3,):
atexit.unregister(self._restore_default_terminal_settings)
atexit.unregister(self._restore_default_terminal_settings)

# restore SIGSTP and SIGCONT handlers
if self.old_handlers:
@@ -309,7 +306,7 @@ def __init__(self, file_like):

self.file_like = file_like

if isinstance(file_like, string_types):
if isinstance(file_like, str):
self.open = True
elif _file_descriptors_work(file_like):
self.open = False
@@ -323,12 +320,9 @@ def unwrap(self):
def unwrap(self):
if self.open:
if self.file_like:
if sys.version_info < (3,):
self.file = open(self.file_like, "w")
else:
self.file = open(self.file_like, "w", encoding="utf-8") # novm
self.file = open(self.file_like, "w", encoding="utf-8")
else:
self.file = StringIO()
self.file = io.StringIO()
return self.file
else:
# We were handed an already-open file object. In this case we also
@@ -699,13 +693,10 @@ def __init__(self, sys_attr):
self.sys_attr = sys_attr
self.saved_stream = None
if sys.platform.startswith("win32"):
if sys.version_info < (3, 5):
libc = ctypes.CDLL(ctypes.util.find_library("c"))
if hasattr(sys, "gettotalrefcount"): # debug build
libc = ctypes.CDLL("ucrtbased")
else:
if hasattr(sys, "gettotalrefcount"): # debug build
libc = ctypes.CDLL("ucrtbased")
else:
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")

kernel32 = ctypes.WinDLL("kernel32")

@@ -794,7 +785,7 @@ def __enter__(self):
raise RuntimeError("file argument must be set by __init__ ")

# Open both write and reading on logfile
if type(self.logfile) == StringIO:
if type(self.logfile) == io.StringIO:
self._ioflag = True
# cannot have two streams on tempfile, so we must make our own
sys.stdout = self.logfile
@@ -927,13 +918,10 @@ def _writer_daemon(
if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)

# Use line buffering (3rd param = 1) since Python 3 has a bug
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
if sys.version_info < (3,):
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
else:
# Python 3.x before 3.7 does not open with UTF-8 encoding by default
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")

if stdin_multiprocess_fd:
stdin = os.fdopen(stdin_multiprocess_fd.fd)
@@ -1023,7 +1011,7 @@ def _writer_daemon(

finally:
# send written data back to parent if we used a StringIO
if isinstance(log_file, StringIO):
if isinstance(log_file, io.StringIO):
control_pipe.send(log_file.getvalue())
log_file_wrapper.close()
close_connection_and_file(read_multiprocess_fd, in_pipe)

@@ -24,8 +24,7 @@
import traceback

import llnl.util.tty.log as log

from spack.util.executable import which
from llnl.util.executable import which

termios = None
try:

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.19.0.dev0"
__version__ = "0.20.0.dev0"
spack_version = __version__



@@ -5,12 +5,12 @@

import os

from llnl.util.executable import Executable, ProcessError
from llnl.util.lang import memoized

import spack.spec
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError


class ABI(object):

@@ -37,15 +37,14 @@ def _search_duplicate_compilers(error_cls):
"""
import ast
import collections
import collections.abc
import inspect
import itertools
import pickle
import re

from six.moves.urllib.request import urlopen
from urllib.request import urlopen

import llnl.util.lang
from llnl.util.compat import Sequence

import spack.config
import spack.patch
@@ -81,7 +80,7 @@ def __hash__(self):
return hash(value)


class AuditClass(Sequence):
class AuditClass(collections.abc.Sequence):
def __init__(self, group, tag, description, kwargs):
"""Return an object that acts as a decorator to register functions
associated with a specific class of sanity checks.
@@ -288,7 +287,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
test_callbacks = pkg_cls.build_time_test_callbacks
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

if test_callbacks and "test" in test_callbacks:
msg = '{0} package contains "test" method in ' "build_time_test_callbacks"

@@ -9,21 +9,26 @@
import json
import multiprocessing.pool
import os
import re
import shutil
import sys
import tarfile
import tempfile
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import warnings
from contextlib import closing
from urllib.error import HTTPError, URLError

import ruamel.yaml as yaml
from six.moves.urllib.error import HTTPError, URLError

import llnl.util.filesystem as fsys
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.executable import which
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree

import spack.cmd
@@ -38,6 +43,7 @@
import spack.store
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
@@ -46,7 +52,6 @@
from spack.relocate import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which

_build_cache_relative_path = "build_cache"
_build_cache_keys_relative_path = "_pgp"
@@ -266,10 +271,7 @@ def find_by_hash(self, find_hash, mirrors_to_check=None):
None, just assumes all configured mirrors.
"""
if find_hash not in self._mirrors_for_spec:
# Not found in the cached index, pull the latest from the server.
self.update(with_cooldown=True)
if find_hash not in self._mirrors_for_spec:
return None
return []
results = self._mirrors_for_spec[find_hash]
if not mirrors_to_check:
return results
@@ -345,7 +347,6 @@ def update(self, with_cooldown=False):

for cached_mirror_url in self._local_index_cache:
cache_entry = self._local_index_cache[cached_mirror_url]
cached_index_hash = cache_entry["index_hash"]
cached_index_path = cache_entry["index_path"]
if cached_mirror_url in configured_mirror_urls:
# Only do a fetch if the last fetch was longer than TTL ago
@@ -364,13 +365,14 @@ def update(self, with_cooldown=False):
# May need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(
cached_mirror_url, expect_hash=cached_index_hash
cached_mirror_url,
cache_entry=cache_entry,
)
self._last_fetch_times[cached_mirror_url] = (now, True)
all_methods_failed = False
except FetchCacheError as fetch_error:
except FetchIndexError as e:
needs_regen = False
fetch_errors.extend(fetch_error.errors)
fetch_errors.append(e)
self._last_fetch_times[cached_mirror_url] = (now, False)
# The need to regenerate implies a need to clear as well.
spec_cache_clear_needed |= needs_regen
@@ -399,29 +401,36 @@ def update(self, with_cooldown=False):
# already have in our cache must be fetched, stored, and represented
# locally.
for mirror_url in configured_mirror_urls:
if mirror_url not in self._local_index_cache:
# Need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(mirror_url)
self._last_fetch_times[mirror_url] = (now, True)
all_methods_failed = False
except FetchCacheError as fetch_error:
fetch_errors.extend(fetch_error.errors)
needs_regen = False
self._last_fetch_times[mirror_url] = (now, False)
# Generally speaking, a new mirror wouldn't imply the need to
# clear the spec cache, so leave it as is.
if needs_regen:
spec_cache_regenerate_needed = True
if mirror_url in self._local_index_cache:
continue

# Need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(mirror_url)
self._last_fetch_times[mirror_url] = (now, True)
all_methods_failed = False
except FetchIndexError as e:
fetch_errors.append(e)
needs_regen = False
self._last_fetch_times[mirror_url] = (now, False)
# Generally speaking, a new mirror wouldn't imply the need to
# clear the spec cache, so leave it as is.
if needs_regen:
spec_cache_regenerate_needed = True

self._write_local_index_cache()

if all_methods_failed:
raise FetchCacheError(fetch_errors)
elif spec_cache_regenerate_needed:
if fetch_errors:
tty.warn(
"The following issues were ignored while updating the indices of binary caches",
FetchCacheError(fetch_errors),
)
if spec_cache_regenerate_needed:
self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)

def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
"""Fetch a buildcache index file from a remote mirror and cache it.

If we already have a cached index from this mirror, then we first
@@ -429,102 +438,50 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):

Args:
mirror_url (str): Base url of mirror
expect_hash (str): If provided, this hash will be compared against
the index hash we retrieve from the mirror, to determine if we
need to fetch the index or not.
cache_entry (dict): Old cache metadata with keys ``index_hash``, ``index_path``,
``etag``

Returns:
True if this function thinks the concrete spec cache,
``_mirrors_for_spec``, should be regenerated. Returns False
otherwise.
Throws:
FetchCacheError: a composite exception.
"""
index_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json")
hash_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json.hash")
True if the local index.json was updated.

if not web_util.url_exists(index_fetch_url):
# A binary mirror is not required to have an index, so avoid
# raising FetchCacheError in that case.
Throws:
FetchIndexError
"""
# TODO: get rid of this request, handle 404 better
if not web_util.url_exists(
url_util.join(mirror_url, _build_cache_relative_path, "index.json")
):
return False

old_cache_key = None
fetched_hash = None

errors = []

# Fetch the hash first so we can check if we actually need to fetch
# the index itself.
try:
_, _, fs = web_util.read_from_url(hash_fetch_url)
fetched_hash = codecs.getreader("utf-8")(fs).read()
except (URLError, web_util.SpackWebError) as url_err:
errors.append(
RuntimeError(
"Unable to read index hash {0} due to {1}: {2}".format(
hash_fetch_url, url_err.__class__.__name__, str(url_err)
)
)
)
etag = cache_entry.get("etag", None)
if etag:
fetcher = EtagIndexFetcher(mirror_url, etag)
else:
fetcher = DefaultIndexFetcher(
mirror_url, local_hash=cache_entry.get("index_hash", None)
)

# The only case where we'll skip attempting to fetch the buildcache
# index from the mirror is when we already have a hash for this
# mirror, we were able to retrieve one from the mirror, and
# the two hashes are the same.
if expect_hash and fetched_hash:
if fetched_hash == expect_hash:
tty.debug("Cached index for {0} already up to date".format(mirror_url))
return False
else:
# We expected a hash, we fetched a hash, and they were not the
# same. If we end up fetching an index successfully and
# replacing our entry for this mirror, we should clean up the
# existing cache file
if mirror_url in self._local_index_cache:
existing_entry = self._local_index_cache[mirror_url]
old_cache_key = existing_entry["index_path"]
result = fetcher.conditional_fetch()

tty.debug("Fetching index from {0}".format(index_fetch_url))

# Fetch index itself
try:
_, _, fs = web_util.read_from_url(index_fetch_url)
index_object_str = codecs.getreader("utf-8")(fs).read()
except (URLError, web_util.SpackWebError) as url_err:
errors.append(
RuntimeError(
"Unable to read index {0} due to {1}: {2}".format(
index_fetch_url, url_err.__class__.__name__, str(url_err)
)
)
)
raise FetchCacheError(errors)

locally_computed_hash = compute_hash(index_object_str)

if fetched_hash is not None and locally_computed_hash != fetched_hash:
msg = (
"Computed hash ({0}) did not match remote ({1}), "
"indicating error in index transmission"
).format(locally_computed_hash, expect_hash)
errors.append(RuntimeError(msg))
# We somehow got an index that doesn't match the remote one, maybe
# the next time we try we'll be successful.
raise FetchCacheError(errors)
# Nothing to do
if result.fresh:
return False

# Persist new index.json
url_hash = compute_hash(mirror_url)

cache_key = "{0}_{1}.json".format(url_hash[:10], locally_computed_hash[:10])
cache_key = "{}_{}.json".format(url_hash[:10], result.hash[:10])
self._index_file_cache.init_entry(cache_key)
with self._index_file_cache.write_transaction(cache_key) as (old, new):
new.write(index_object_str)
new.write(result.data)

self._local_index_cache[mirror_url] = {
"index_hash": locally_computed_hash,
"index_hash": result.hash,
"index_path": cache_key,
"etag": result.etag,
}

# clean up the old cache_key if necessary
old_cache_key = cache_entry.get("index_path", None)
if old_cache_key:
self._index_file_cache.remove(old_cache_key)

@@ -621,7 +578,9 @@ class UnsignedPackageException(spack.error.SpackError):


def compute_hash(data):
return hashlib.sha256(data.encode("utf-8")).hexdigest()
if isinstance(data, str):
data = data.encode("utf-8")
return hashlib.sha256(data).hexdigest()
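``compute_hash`` now accepts ``str`` or ``bytes``, so callers holding raw HTTP response bytes no longer need to decode first; both spellings agree:

    import hashlib

    def compute_hash(data):
        if isinstance(data, str):
            data = data.encode("utf-8")
        return hashlib.sha256(data).hexdigest()

    assert compute_hash("abc") == compute_hash(b"abc")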


def build_cache_relative_path():
@@ -914,8 +873,6 @@ def _fetch_spec_from_mirror(spec_url):
return Spec.from_dict(specfile_json)
if spec_url.endswith(".json"):
return Spec.from_json(spec_file_contents)
if spec_url.endswith(".yaml"):
return Spec.from_yaml(spec_file_contents)

tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
@@ -990,8 +947,6 @@ def file_read_method(file_path):
"*.spec.json.sig",
"--include",
"*.spec.json",
"--include",
"*.spec.yaml",
cache_prefix,
tmpspecsdir,
]
@@ -1001,7 +956,7 @@ def file_read_method(file_path):
"Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
)
aws(*sync_command_args, output=os.devnull, error=os.devnull)
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"])
read_fn = file_read_method
except Exception:
tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
@@ -1037,9 +992,7 @@ def url_read_method(url):
file_list = [
url_util.join(cache_prefix, entry)
for entry in web_util.list_url(cache_prefix)
if entry.endswith(".yaml")
or entry.endswith("spec.json")
or entry.endswith("spec.json.sig")
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
]
read_fn = url_read_method
except KeyError as inst:
@@ -1101,14 +1054,6 @@ def generate_package_index(cache_prefix, concurrency=32):
tty.error("Unable to generate package index, {0}".format(err))
return

if any(x.endswith(".yaml") for x in file_list):
msg = (
"The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
"this format will be removed in v0.20, please regenerate the build cache with a "
"recent Spack\n"
).format(cache_prefix)
warnings.warn(msg)

tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

tmpdir = tempfile.mkdtemp()
@@ -1195,7 +1140,7 @@ def generate_key_index(key_prefix, tmpdir=None):

def _build_tarball(
spec,
outdir,
out_url,
force=False,
relative=False,
unsigned=False,
@@ -1218,8 +1163,7 @@ def _build_tarball(
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))

remote_spackfile_path = url_util.join(outdir, os.path.relpath(spackfile_path, tmpdir))
remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir))

mkdirp(tarfile_dir)
if web_util.url_exists(remote_spackfile_path):
@@ -1236,15 +1180,11 @@ def _build_tarball(
specfile_name = tarball_name(spec, ".spec.json")
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
signed_specfile_path = "{0}.sig".format(specfile_path)
deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml")

remote_specfile_path = url_util.join(
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
)
remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
remote_specfile_path_deprecated = url_util.join(
outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir))
)

# If force and exists, overwrite. Otherwise raise exception on collision.
if force:
@@ -1252,12 +1192,8 @@ def _build_tarball(
web_util.remove_url(remote_specfile_path)
if web_util.url_exists(remote_signed_specfile_path):
web_util.remove_url(remote_signed_specfile_path)
if web_util.url_exists(remote_specfile_path_deprecated):
web_util.remove_url(remote_specfile_path_deprecated)
elif (
web_util.url_exists(remote_specfile_path)
or web_util.url_exists(remote_signed_specfile_path)
or web_util.url_exists(remote_specfile_path_deprecated)
elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
remote_signed_specfile_path
):
raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1313,12 +1249,10 @@ def _build_tarball(

with open(spec_file, "r") as inputfile:
content = inputfile.read()
if spec_file.endswith(".yaml"):
spec_dict = yaml.load(content)
elif spec_file.endswith(".json"):
if spec_file.endswith(".json"):
spec_dict = sjson.load(content)
else:
raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file))
raise ValueError("{0} not a valid spec file type".format(spec_file))
spec_dict["buildcache_layout_version"] = 1
bchecksum = {}
bchecksum["hash_algorithm"] = "sha256"
@@ -1353,12 +1287,12 @@ def _build_tarball(
# push the key to the build cache's _pgp directory so it can be
# imported
if not unsigned:
push_keys(outdir, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)

# create an index.json for the build_cache directory so specs can be
# found
if regenerate_index:
generate_package_index(url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir)))
finally:
shutil.rmtree(tmpdir)

@@ -1539,7 +1473,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
# Assumes we care more about finding a spec file by preferred ext
# than by mirror priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
for ext in ["json.sig", "json", "yaml"]:
for ext in ["json.sig", "json"]:
for mirror_to_try in mirrors_to_try:
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
spackfile_url = mirror_to_try["spackfile"]
@@ -1576,13 +1510,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
if ext == "yaml":
msg = (
"Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
"deprecated and will be removed in v0.20\n"
).format(spackfile_url)
warnings.warn(msg)

return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
@@ -1606,10 +1533,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
)
)

tty.warn(
"download_tarball() was unable to download "
+ "{0} from any configured mirrors".format(spec)
)
return None


@@ -1634,7 +1557,7 @@ def make_package_relative(workdir, spec, allow_root):
if "elf" in platform.binary_formats:
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)

relocate.raise_if_not_relocatable(cur_path_names, allow_root)
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
orig_path_names = list()
cur_path_names = list()
for linkname in buildinfo.get("relocate_links", []):
@@ -1652,7 +1575,7 @@ def check_package_relocatable(workdir, spec, allow_root):
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
cur_path_names.append(os.path.join(workdir, filename))
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)


def dedupe_hardlinks_if_necessary(root, buildinfo):
@@ -1826,8 +1749,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
spackfile_path = os.path.join(stagepath, spackfile_name)
tarfile_name = tarball_name(spec, ".tar.gz")
tarfile_path = os.path.join(extract_to, tarfile_name)
deprecated_yaml_name = tarball_name(spec, ".spec.yaml")
deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
json_name = tarball_name(spec, ".spec.json")
json_path = os.path.join(extract_to, json_name)
with closing(tarfile.open(spackfile_path, "r")) as tar:
@@ -1839,8 +1760,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum

if os.path.exists(json_path):
specfile_path = json_path
elif os.path.exists(deprecated_yaml_path):
specfile_path = deprecated_yaml_path
else:
raise ValueError("Cannot find spec file for {0}.".format(extract_to))

@@ -1887,10 +1806,8 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
content = inputfile.read()
if specfile_path.endswith(".json.sig"):
spec_dict = Spec.extract_json_from_clearsig(content)
elif specfile_path.endswith(".json"):
spec_dict = sjson.load(content)
else:
spec_dict = syaml.load(content)
spec_dict = sjson.load(content)

bchecksum = spec_dict["binary_cache_checksum"]
filename = download_result["tarball_stage"].save_filename
@@ -1902,7 +1819,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
or int(spec_dict["buildcache_layout_version"]) < 1
):
# Handle the older buildcache layout where the .spack file
# contains a spec json/yaml, maybe an .asc file (signature),
# contains a spec json, maybe an .asc file (signature),
# and another tarball containing the actual install tree.
tmpdir = tempfile.mkdtemp()
try:
@@ -2053,17 +1970,12 @@ def try_direct_fetch(spec, mirrors=None):
"""
Try to find the spec directly on the configured mirrors
"""
deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
specfile_name = tarball_name(spec, ".spec.json")
signed_specfile_name = tarball_name(spec, ".spec.json.sig")
specfile_is_signed = False
specfile_is_json = True
found_specs = []

for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
buildcache_fetch_url_yaml = url_util.join(
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
)
buildcache_fetch_url_json = url_util.join(
mirror.fetch_url, _build_cache_relative_path, specfile_name
)
@@ -2077,28 +1989,19 @@ def try_direct_fetch(spec, mirrors=None):
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
specfile_is_json = False
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
tty.debug(
"Did not find {0} on {1}".format(
specfile_name, buildcache_fetch_url_signed_json
),
url_err,
level=2,
)
tty.debug(
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
url_err_x,
level=2,
)
tty.debug(
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
url_err_y,
level=2,
)
continue
tty.debug(
"Did not find {0} on {1}".format(
specfile_name, buildcache_fetch_url_signed_json
),
url_err,
level=2,
)
tty.debug(
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
url_err_x,
level=2,
)
continue
specfile_contents = codecs.getreader("utf-8")(fs).read()

# read the spec from the build cache file. All specs in build caches
@@ -2107,10 +2010,8 @@ def try_direct_fetch(spec, mirrors=None):
if specfile_is_signed:
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = Spec.from_dict(specfile_json)
elif specfile_is_json:
fetched_spec = Spec.from_json(specfile_contents)
else:
fetched_spec = Spec.from_yaml(specfile_contents)
fetched_spec = Spec.from_json(specfile_contents)
fetched_spec._mark_concrete()

found_specs.append(
@@ -2132,8 +2033,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
spec (spack.spec.Spec): The spec to look for in binary mirrors
mirrors_to_check (dict): Optionally override the configured mirrors
with the mirrors in this dictionary.
index_only (bool): Do not attempt direct fetching of ``spec.json``
files from remote mirrors, only consider the indices.
index_only (bool): When ``index_only`` is set to ``True``, only the local
cache is checked, no requests are made.

Return:
A list of objects, each containing a ``mirror_url`` and ``spec`` key
@@ -2321,7 +2222,7 @@ def needs_rebuild(spec, mirror_url):
specfile_path = os.path.join(cache_prefix, specfile_name)

# Only check for the presence of the json version of the spec. If the
# mirror only has the yaml version, or doesn't have the spec at all, we
# mirror only has the json version, or doesn't have the spec at all, we
# need to rebuild.
return not web_util.url_exists(specfile_path)

@@ -2429,7 +2330,6 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
"url": [
tarball_name(concrete_spec, ".spec.json.sig"),
tarball_name(concrete_spec, ".spec.json"),
tarball_name(concrete_spec, ".spec.yaml"),
],
"path": destination,
"required": True,
@@ -2470,3 +2370,126 @@ def __call__(self, spec, **kwargs):
# Matching a spec constraint
matches = [s for s in self.possible_specs if s.satisfies(spec)]
return matches


class FetchIndexError(Exception):
def __str__(self):
if len(self.args) == 1:
return str(self.args[0])
else:
return "{}, due to: {}".format(self.args[0], self.args[1])


FetchIndexResult = collections.namedtuple("FetchIndexResult", "etag hash data fresh")


class DefaultIndexFetcher:
"""Fetcher for index.json, using separate index.json.hash as cache invalidation strategy"""

def __init__(self, url, local_hash, urlopen=web_util.urlopen):
self.url = url
self.local_hash = local_hash
self.urlopen = urlopen
self.headers = {"User-Agent": web_util.SPACK_USER_AGENT}

def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
except urllib.error.URLError:
return None

# Validate the hash
remote_hash = response.read(64)
if not re.match(rb"[a-f\d]{64}$", remote_hash):
return None
return remote_hash.decode("utf-8")

def conditional_fetch(self):
# Do an intermediate fetch for the hash
# and a conditional fetch for the contents

# Early exit if our cache is up to date.
if self.local_hash and self.local_hash == self.get_remote_hash():
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

# Otherwise, download index.json
url_index = url_util.join(self.url, _build_cache_relative_path, "index.json")

try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e)

try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
return FetchCacheError("Remote index {} is invalid".format(url_index), e)

computed_hash = compute_hash(result)

# We don't handle computed_hash != remote_hash here, which can happen
# when remote index.json and index.json.hash are out of sync, or if
# the hash algorithm changed.
# The most likely scenario is that index.json got updated
# while we fetched index.json.hash. Warning about an issue thus feels
# wrong, as it's more of an issue with race conditions in the cache
# invalidation strategy.

# For now we only handle etags on http(s), since 304 error handling
# in s3:// is not there yet.
if urllib.parse.urlparse(self.url).scheme not in ("http", "https"):
etag = None
else:
etag = web_util.parse_etag(
response.headers.get("Etag", None) or response.headers.get("etag", None)
)

return FetchIndexResult(
etag=etag,
hash=computed_hash,
data=result,
fresh=False,
)


class EtagIndexFetcher:
"""Fetcher for index.json, using ETags headers as cache invalidation strategy"""

def __init__(self, url, etag, urlopen=web_util.urlopen):
self.url = url
self.etag = etag
self.urlopen = urlopen

def conditional_fetch(self):
# Just do a conditional fetch immediately
url = url_util.join(self.url, _build_cache_relative_path, "index.json")
headers = {
"User-Agent": web_util.SPACK_USER_AGENT,
"If-None-Match": '"{}"'.format(self.etag),
}

try:
response = self.urlopen(urllib.request.Request(url, headers=headers))
except urllib.error.HTTPError as e:
if e.getcode() == 304:
# Not modified; that means fresh.
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
raise FetchIndexError("Could not fetch index {}".format(url), e) from e
except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index {}".format(url), e) from e

try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
raise FetchIndexError("Remote index {} is invalid".format(url), e) from e

headers = response.headers
etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
return FetchIndexResult(
etag=web_util.parse_etag(etag_header_value),
hash=compute_hash(result),
data=result,
fresh=False,
)
|
||||
|
||||
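
# Illustrative sketch (assumes only the stdlib): the ETag-based conditional fetch
# implemented by the two fetchers above, reduced to its essentials. The mirror URL
# is hypothetical; any server honoring If-None-Match behaves this way.
import urllib.error
import urllib.request

def fetch_if_changed(url, etag=None):
    """Return (data, etag); data is None when the cached copy is still fresh."""
    headers = {}
    if etag is not None:
        headers["If-None-Match"] = '"{}"'.format(etag)
    try:
        response = urllib.request.urlopen(urllib.request.Request(url, headers=headers))
    except urllib.error.HTTPError as e:
        if e.getcode() == 304:
            return None, etag  # Not modified: nothing to download
        raise
    return response.read(), response.headers.get("Etag", "").strip('"')

# First call downloads and records the ETag; later calls return (None, etag)
# until the remote index actually changes:
# data, etag = fetch_if_changed("https://mirror.example.com/build_cache/index.json")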
File diff suppressed because it is too large
25  lib/spack/spack/bootstrap/__init__.py  Normal file
@@ -0,0 +1,25 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Functions and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping
from .core import (
    all_core_root_specs,
    ensure_core_dependencies,
    ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

__all__ = [
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_patchelf_in_path_or_raise",
    "all_core_root_specs",
    "ensure_environment_dependencies",
    "BootstrapEnvironment",
    "status_message",
]
218  lib/spack/spack/bootstrap/_common.py  Normal file
@@ -0,0 +1,218 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import os.path
import re
import sys
import sysconfig
import warnings

import archspec.cpu

import llnl.util.envmod
import llnl.util.executable
import llnl.util.filesystem as fs
from llnl.util import tty

import spack.platforms
import spack.store

from .config import spec_for_current_python


def _python_import(module):
    try:
        __import__(module)
    except ImportError:
        return False
    return True


def _try_import_from_store(module, query_spec, query_info=None):
    """Return True if the module can be imported from an already
    installed spec, False otherwise.

    Args:
        module: Python module to be imported
        query_spec: spec that may provide the module
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    # If it is a string assume it's one of the root specs provided by this module
    if isinstance(query_spec, str):
        # We have to run as part of this python interpreter
        query_spec += " ^" + spec_for_current_python()

    installed_specs = spack.store.db.query(query_spec, installed=True)

    for candidate_spec in installed_specs:
        pkg = candidate_spec["python"].package
        module_paths = [
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]  # type: list[str]
        path_before = list(sys.path)

        # NOTE: try module_paths first and last, last allows an existing version in path
        # to be picked up and used, possibly depending on something in the store, first
        # allows the bootstrap version to work when an incompatible version is in
        # sys.path
        orders = [
            module_paths + sys.path,
            sys.path + module_paths,
        ]
        for path in orders:
            sys.path = path
            try:
                _fix_ext_suffix(candidate_spec)
                if _python_import(module):
                    msg = (
                        f"[BOOTSTRAP MODULE {module}] The installed spec "
                        f'"{query_spec}/{candidate_spec.dag_hash()}" '
                        f'provides the "{module}" Python module'
                    )
                    tty.debug(msg)
                    if query_info is not None:
                        query_info["spec"] = candidate_spec
                    return True
            except Exception as exc:  # pylint: disable=broad-except
                msg = (
                    "unexpected error while trying to import module "
                    f'"{module}" from spec "{candidate_spec}" [error="{str(exc)}"]'
                )
                warnings.warn(msg)
            else:
                msg = "Spec {0} did not provide module {1}"
                warnings.warn(msg.format(candidate_spec, module))

        sys.path = path_before

    return False
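
# Illustrative sketch (stdlib only) of the sys.path ordering trick used by
# _try_import_from_store() above: candidate paths are tried both before and
# after the current sys.path, and sys.path is restored afterwards. The
# site-packages path in the trailing comment is hypothetical.
import importlib
import sys

def import_with_candidate_paths(module_name, candidate_paths):
    path_before = list(sys.path)
    for trial in (candidate_paths + sys.path, sys.path + candidate_paths):
        sys.path = trial
        try:
            importlib.import_module(module_name)
            return True
        except ImportError:
            continue  # try the other ordering
        finally:
            sys.path = path_before  # always restore the interpreter state
    return False

# import_with_candidate_paths("yaml", ["/opt/store/py-yaml/lib/python3.10/site-packages"])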

def _fix_ext_suffix(candidate_spec):
    """Fix the external suffixes of Python extensions on the fly for
    platforms that may need it

    Args:
        candidate_spec (Spec): installed spec with a Python module
            to be checked.
    """
    # Here we map target families to the patterns expected
    # by pristine CPython. Only architectures with known issues
    # are included. Known issues:
    #
    # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
    #
    _suffix_to_be_checked = {
        "ppc64le": {
            "glob": "*.cpython-*-powerpc64le-linux-gnu.so",
            "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so",
            "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so",
        }
    }

    # If the current architecture is not problematic return
    generic_target = archspec.cpu.host().family
    if str(generic_target) not in _suffix_to_be_checked:
        return

    # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
    # the expectations, return since the package is surely good
    ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
    if ext_suffix is None:
        return

    expected = _suffix_to_be_checked[str(generic_target)]
    if fnmatch.fnmatch(ext_suffix, expected["glob"]):
        return

    # If we are here it means the current interpreter expects different names
    # than pristine CPython. So:
    # 1. Find what we have installed
    # 2. Create symbolic links for the other names, if they're not there already

    # Check if standard names are installed and if we have to create
    # links for this interpreter
    standard_extensions = fs.find(candidate_spec.prefix, expected["glob"])
    link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions]
    for file_name, link_name in zip(standard_extensions, link_names):
        if os.path.exists(link_name):
            continue
        os.symlink(file_name, link_name)

    # Check if this interpreter installed something and we have to create
    # links for a standard CPython interpreter
    non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix)
    for abs_path in non_standard_extensions:
        directory, filename = os.path.split(abs_path)
        module = filename.split(".")[0]
        link_name = os.path.join(
            directory,
            expected["fmt"].format(
                module=module, major=sys.version_info[0], minor=sys.version_info[1]
            ),
        )
        if os.path.exists(link_name):
            continue
        os.symlink(abs_path, link_name)
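
# Illustrative sketch: how the EXT_SUFFIX check above works. EXT_SUFFIX is what
# this interpreter appends to native extension modules; the "pristine" name is
# built from the "fmt" pattern in _suffix_to_be_checked ("zlib" is just an
# example module name).
import sys
import sysconfig

ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
print(ext_suffix)  # e.g. ".cpython-310-x86_64-linux-gnu.so"

major, minor = sys.version_info[:2]
pristine = "{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so".format(
    module="zlib", major=major, minor=minor
)
print(pristine)  # name a pristine CPython on ppc64le would expect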

def _executables_in_store(executables, query_spec, query_info=None):
    """Return True if at least one of the executables can be retrieved from
    a spec in store, False otherwise.

    The different executables must provide the same functionality and are
    "alternate" to each other, i.e. the function will exit True on the first
    executable found.

    Args:
        executables: list of executables to be searched
        query_spec: spec that may provide the executable
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    executables_str = ", ".join(executables)
    msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
    tty.debug(msg.format(executables_str, query_spec))
    installed_specs = spack.store.db.query(query_spec, installed=True)
    if installed_specs:
        for concrete_spec in installed_specs:
            bin_dir = concrete_spec.prefix.bin
            # If we have a "bin" directory and it contains
            # the executables we are looking for
            if (
                os.path.exists(bin_dir)
                and os.path.isdir(bin_dir)
                and llnl.util.executable.which_string(*executables, path=bin_dir)
            ):
                llnl.util.envmod.path_put_first("PATH", [bin_dir])
                if query_info is not None:
                    query_info["command"] = llnl.util.executable.which(*executables, path=bin_dir)
                    query_info["spec"] = concrete_spec
                return True
    return False


def _root_spec(spec_str):
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but macOS and Windows.
    if str(spack.platforms.host()) == "darwin":
        spec_str += " %apple-clang"
    elif str(spack.platforms.host()) == "windows":
        spec_str += " %msvc"
    else:
        spec_str += " %gcc"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"

    tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
    return spec_str
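
# Illustrative sketch of _root_spec()'s effect, as a standalone mirror of its
# branching (the target family is hard-coded here; the real code asks archspec):
import platform

def root_spec_hint(spec_str, system=None, family="x86_64"):
    system = (system or platform.system()).lower()
    compiler = {"darwin": "%apple-clang", "windows": "%msvc"}.get(system, "%gcc")
    return f"{spec_str} {compiler} target={family}"

print(root_spec_hint("gnupg@2.3:", system="linux"))
# -> "gnupg@2.3: %gcc target=x86_64"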
169  lib/spack/spack/bootstrap/config.py  Normal file
@@ -0,0 +1,169 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os.path
import sys

from llnl.util import tty

import spack.compilers
import spack.config
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.store
import spack.util.path

#: Reference counter for the bootstrapping configuration context manager
_REF_COUNT = 0


def is_bootstrapping():
    """Return True if we are in a bootstrapping context, False otherwise."""
    return _REF_COUNT > 0


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
    minor version (all patches are ABI compatible with the same minor).

    See:
        https://www.python.org/dev/peps/pep-0513/
        https://stackoverflow.com/a/35801395/771663
    """
    version_str = ".".join(str(x) for x in sys.version_info[:2])
    return f"python@{version_str}"


def root_path():
    """Root of all the bootstrap related folders"""
    return spack.util.path.canonicalize_path(
        spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
    )


def store_path():
    """Path to the store used for bootstrapped software"""
    enabled = spack.config.get("bootstrap:enable", True)
    if not enabled:
        msg = 'bootstrapping is currently disabled. Use "spack bootstrap enable" to enable it'
        raise RuntimeError(msg)

    return _store_path()


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = sys.exec_prefix
    external_python = spec_for_current_python()

    entry = {
        "buildable": False,
        "externals": [{"prefix": python_prefix, "spec": str(external_python)}],
    }

    with spack.config.override("packages:python::", entry):
        yield


def _store_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))


def _config_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    """Swap the current configuration for the one used to bootstrap Spack.

    The context manager is reference counted to ensure we don't swap multiple
    times if there's nested use of it in the stack. One compelling use case
    is bootstrapping patchelf during the bootstrap of clingo.
    """
    global _REF_COUNT  # pylint: disable=global-statement
    already_swapped = bool(_REF_COUNT)
    _REF_COUNT += 1
    try:
        if already_swapped:
            yield
        else:
            with _ensure_bootstrap_configuration():
                yield
    finally:
        _REF_COUNT -= 1
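
# Illustrative sketch of the reference-counted context manager pattern used by
# ensure_bootstrap_configuration() above: only the outermost entry performs the
# expensive swap, nested entries just bump the counter.
import contextlib

_COUNT = 0

@contextlib.contextmanager
def refcounted(swap_factory):
    """swap_factory is any zero-argument callable returning a context manager."""
    global _COUNT
    already_active = bool(_COUNT)
    _COUNT += 1
    try:
        if already_active:
            yield  # nested use: the swap is already in place
        else:
            with swap_factory():
                yield
    finally:
        _COUNT -= 1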

def _read_and_sanitize_configuration():
    """Read the user configuration that needs to be reused for bootstrapping
    and remove the entries that should not be copied over.
    """
    # Read the "config" section but pop the install tree (the entry will not be
    # considered due to the use_store context manager, so it will be confusing
    # to have it in the configuration).
    config_yaml = spack.config.get("config")
    config_yaml.pop("install_tree", None)
    user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml}
    return user_configuration


def _bootstrap_config_scopes():
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(
            "/".join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
        tty.debug(msg.format(platform_scope.name, platform_scope.path))
    return config_scopes


def _add_compilers_if_missing():
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


@contextlib.contextmanager
def _ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
            bootstrap_store_path
        ):
            # Default configuration scopes excluding command line
            # and builtin but accounting for platform specific scopes
            config_scopes = _bootstrap_config_scopes()
            with spack.config.use_configuration(*config_scopes):
                # We may need to compile code from sources, so ensure we
                # have compilers for the current platform
                _add_compilers_if_missing()
                spack.config.set("bootstrap", user_configuration["bootstrap"])
                spack.config.set("config", user_configuration["config"])
                with spack.modules.disable_modules():
                    with spack_python_interpreter():
                        yield
580  lib/spack/spack/bootstrap/core.py  Normal file
@@ -0,0 +1,580 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap Spack core dependencies from binaries.

This module contains logic to bootstrap software required by Spack from binaries served in the
bootstrapping mirrors. The logic is quite different from an installation done by a Spack user,
because of the following reasons:

1. The binaries are all compiled on the same OS for a given platform (e.g. they are compiled on
   ``centos7`` on ``linux``), but they will be installed and used on the host OS. They are also
   targeted at the most generic architecture possible. That makes the binaries difficult to reuse
   with other specs in an environment without ad-hoc logic.
2. Bootstrapping has a fallback procedure where we try to install software by default from the
   most recent binaries, and proceed to older versions of the mirror, until we try building from
   sources as a last resort. This allows us not to be blocked on architectures where we don't
   have binaries readily available, but is also not compatible with the working of environments
   (they don't have fallback procedures).
3. Among the binaries we have clingo, so we can't concretize that with clingo :-)
4. clingo, GnuPG and patchelf binaries need to be verified by sha256 sum (all the other binaries
   we might add on top of that in principle can be verified with GPG signatures).
"""

import copy
import functools
import json
import os
import os.path
import sys
import uuid
from typing import Callable, List, Optional

import llnl.util.envmod
import llnl.util.executable
from llnl.util import tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.platforms.linux
import spack.repo
import spack.spec
import spack.store
import spack.user_environment
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version

from ._common import (
    _executables_in_store,
    _python_import,
    _root_spec,
    _try_import_from_store,
)
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = "metadata.yaml"

#: Whether the current platform is Windows
IS_WINDOWS = sys.platform == "win32"

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}


def bootstrapper(bootstrapper_type: str):
    """Decorator to register classes implementing bootstrapping
    methods.

    Args:
        bootstrapper_type: string identifying the class
    """

    def _register(cls):
        _bootstrap_methods[bootstrapper_type] = cls
        return cls

    return _register
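
# Illustrative sketch of the registry-decorator pattern above: a string key in
# the configuration selects the class, exactly as create_bootstrapper() does
# further down in this file.
_registry = {}

def register(kind):
    def _register(cls):
        _registry[kind] = cls
        return cls
    return _register

@register("buildcache")
class FromBuildcache:
    def __init__(self, conf):
        self.conf = conf

def create(conf):
    return _registry[conf["type"]](conf)  # dispatch on the configured type

obj = create({"type": "buildcache"})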

class Bootstrapper:
    """Interface for "core" software bootstrappers"""

    config_scope_name = ""

    def __init__(self, conf):
        self.conf = conf
        self.name = conf["name"]
        self.url = conf["info"]["url"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

    @property
    def mirror_url(self):
        """Mirror url associated with this bootstrapper"""
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if "://" in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        """Mirror scope to be pushed onto the bootstrapping configuration when using
        this bootstrapper.
        """
        return spack.config.InternalConfigScope(
            self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
        )

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
        """Try to import a Python module from a spec satisfying the abstract spec
        passed as argument.

        Args:
            module: Python module name to try importing
            abstract_spec_str: abstract spec that can provide the Python module

        Return:
            True if the Python module could be imported, False otherwise
        """
        return False

    def try_search_path(self, executables: List[str], abstract_spec_str: str) -> bool:
        """Try to search some executables in the prefix of specs satisfying the abstract
        spec passed as argument.

        Args:
            executables: executables to be found
            abstract_spec_str: abstract spec that can provide the executables

        Return:
            True if the executables are found, False otherwise
        """
        return False


@bootstrapper(bootstrapper_type="buildcache")
class BuildcacheBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from a buildcache."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
    def _spec_and_platform(abstract_spec_str):
        """Return the spec object and platform we need to use when
        querying the buildcache.

        Args:
            abstract_spec_str: abstract spec string we are looking for
        """
        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(abstract_spec_str)
        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.platforms.real_host()
        return abstract_spec, bincache_platform

    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = f"{package_name}.json"
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        with open(json_path, encoding="utf-8") as stream:
            data = json.load(stream)
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null",
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family),
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
                spec_str = "/" + pkg_hash
                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
                for match in matches:
                    spack.binary_distribution.install_root_node(
                        match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
                    )

    def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data["verified"]:
                candidate_spec = item["spec"]
                # This will be None for things that don't depend on python
                python_spec = item.get("python", None)
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                    continue

                for _, pkg_hash, pkg_sha256 in item["binaries"]:
                    self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)

                info = {}
                if test_fn(query_spec=abstract_spec, query_info=info):
                    self.last_search = info
                    return True
        return False

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True

        tty.debug(f"Bootstrapping {module} from pre-built binaries")
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + " ^" + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables, abstract_spec_str):
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
        tty.debug(f"Bootstrapping {abstract_spec.name} from pre-built binaries")
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)


@bootstrapper(bootstrapper_type="install")
class SourceBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from sources."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module, abstract_spec_str):
        info = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {module} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False

    def try_search_path(self, executables, abstract_spec_str):
        info = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {abstract_spec_str} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False


def create_bootstrapper(conf):
    """Return a bootstrap object built according to the configuration argument"""
    btype = conf["type"]
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module: module to be imported in the current interpreter
        abstract_spec: abstract spec that might provide the module. If not
            given it defaults to "module"

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug(f"[BOOTSTRAP MODULE {module}] Try importing from Python")
    if _python_import(module):
        return

    abstract_spec = abstract_spec or module

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
                return

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise ImportError(msg)
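
# Illustrative sketch of the try-each-source-and-aggregate-errors flow above,
# with a plain list standing in for GroupedExceptionHandler:
def first_success(candidates, action):
    errors = []
    for candidate in candidates:
        try:
            if action(candidate):
                return candidate  # first success wins
        except Exception as exc:  # record the failure, move to the next source
            errors.append(f"{candidate}: {exc}")
    raise RuntimeError("all sources failed:\n  " + "\n  ".join(errors))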

def ensure_executables_in_path_or_raise(
    executables: list,
    abstract_spec: str,
    cmd_check: Optional[Callable[[llnl.util.executable.Executable], bool]] = None,
):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables
        cmd_check (object): callable predicate that takes a
            ``llnl.util.executable.Executable`` command and validates it. Should return
            ``True`` if the executable is acceptable, ``False`` otherwise.
            Can be used to, e.g., ensure a suitable version of the command before
            accepting it for bootstrapping.

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH

    Return:
        Executable object
    """
    cmd = llnl.util.executable.which(*executables)
    if cmd:
        if not cmd_check or cmd_check(cmd):
            return cmd

    executables_str = ", ".join(executables)

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = (
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
                env_mods = llnl.util.envmod.EnvironmentModifications()
                for dep in concrete_spec.traverse(
                    root=True, order="post", deptype=("link", "run")
                ):
                    env_mods.extend(
                        spack.user_environment.environment_modifications_for_spec(
                            dep, set_package_py_globals=False
                        )
                    )
                cmd.add_default_envmod(env_mods)
                return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise RuntimeError(msg)


def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")


def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    return _root_spec("clingo-bootstrap@spack+python")


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec())


def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    return _root_spec("gnupg@2.3:")


def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    return ensure_executables_in_path_or_raise(
        executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec()
    )


def patchelf_root_spec():
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
    return _root_spec("patchelf@0.13.1:")


def verify_patchelf(patchelf):
    """Older patchelf versions can produce broken binaries, so we
    verify the version here.

    Arguments:
        patchelf (llnl.util.executable.Executable): patchelf executable
    """
    out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
    if patchelf.returncode != 0:
        return False
    parts = out.split(" ")
    if len(parts) < 2:
        return False
    try:
        version = spack.version.Version(parts[1])
    except ValueError:
        return False
    return version >= spack.version.Version("0.13.1")
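
# Illustrative sketch of the version gate in verify_patchelf() above, using
# subprocess instead of Spack's Executable wrapper ("patchelf --version"
# prints "patchelf X.Y.Z"):
import subprocess

def patchelf_is_new_enough(path="patchelf", minimum=(0, 13, 1)):
    try:
        proc = subprocess.run([path, "--version"], capture_output=True, text=True)
    except OSError:
        return False  # executable not found or not runnable
    if proc.returncode != 0:
        return False
    parts = proc.stdout.strip().split(" ")
    if len(parts) < 2:
        return False
    try:
        version = tuple(int(x) for x in parts[1].split("."))
    except ValueError:
        return False
    return version >= minimum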

def ensure_patchelf_in_path_or_raise():
    """Ensure patchelf is in the PATH or raise."""
    # The old concretizer is not smart and we're doing its job: if the latest patchelf
    # does not concretize because the compiler doesn't support C++17, we try to
    # concretize again with an upperbound @:13.
    try:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf
        )
    except RuntimeError:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"],
            abstract_spec=_root_spec("patchelf@0.13.1:0.13"),
            cmd_check=verify_patchelf,
        )


def ensure_core_dependencies():
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    if not IS_WINDOWS:
        ensure_gpg_in_path_or_raise()
    ensure_clingo_importable_or_raise()


def all_core_root_specs():
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]


def bootstrapping_sources(scope: Optional[str] = None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope: if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
    return list_of_sources
191  lib/spack/spack/bootstrap/environment.py  Normal file
@@ -0,0 +1,191 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
import warnings

import archspec.cpu

import llnl.util.executable
from llnl.util import tty

import spack.build_environment
import spack.environment
import spack.spec
import spack.tengine
import spack.util.path

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path


class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

    @classmethod
    def spack_dev_requirements(cls):
        """Spack development requirements"""
        return [
            isort_root_spec(),
            mypy_root_spec(),
            black_root_spec(),
            flake8_root_spec(),
            pytest_root_spec(),
        ]

    @classmethod
    def environment_root(cls):
        """Environment root directory"""
        bootstrap_root_path = root_path()
        python_part = spec_for_current_python().replace("@", "")
        arch_part = archspec.cpu.host().family
        interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
        environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
        return pathlib.Path(
            spack.util.path.canonicalize_path(
                os.path.join(bootstrap_root_path, "environments", environment_dir)
            )
        )

    @classmethod
    def view_root(cls):
        """Location of the view"""
        return cls.environment_root().joinpath("view")

    @classmethod
    def pythonpaths(cls):
        """Paths to be added to sys.path or PYTHONPATH"""
        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
        result = glob.glob(glob_expr)
        if not result:
            msg = f"Cannot find any Python path in {cls.view_root()}"
            warnings.warn(msg)
        return result

    @classmethod
    def bin_dirs(cls):
        """Paths to be added to PATH"""
        return [cls.view_root().joinpath("bin")]

    @classmethod
    def spack_yaml(cls):
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

    def __init__(self):
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

    def update_installations(self):
        """Update the installations of this environment.

        The update is done using a depfile on Linux and macOS, and using the ``install_all``
        method of environments on Windows.
        """
        with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
            specs = self.concretize()
        if specs:
            colorized_specs = [
                spack.spec.Spec(x).cformat("{name}{@version}")
                for x in self.spack_dev_requirements()
            ]
            tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
            self.write(regenerate=False)
            if sys.platform == "win32":
                self.install_all()
            else:
                self._install_with_depfile()
            self.write(regenerate=True)

    def update_syspath_and_environ(self):
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
        the environment view.
        """
        # Do minimal modifications to sys.path and environment variables. In particular, pay
        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
        # the performance of the current interpreter
        sys.path.extend(self.pythonpaths())
        os.environ["PATH"] = os.pathsep.join(
            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
        )
        os.environ["PYTHONPATH"] = os.pathsep.join(
            os.environ.get("PYTHONPATH", "").split(os.pathsep)
            + [str(x) for x in self.pythonpaths()]
        )

    def _install_with_depfile(self):
        spackcmd = llnl.util.executable.which("spack")
        spackcmd(
            "-e",
            str(self.environment_root()),
            "env",
            "depfile",
            "-o",
            str(self.environment_root().joinpath("Makefile")),
        )
        make = llnl.util.executable.which("make")
        kwargs = {}
        if not tty.is_debug():
            kwargs = {"output": os.devnull, "error": os.devnull}
        make(
            "-C",
            str(self.environment_root()),
            "-j",
            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
            **kwargs,
        )

    def _write_spack_yaml_file(self):
        tty.msg(
            "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
        )
        env = spack.tengine.make_environment()
        template = env.get_template("bootstrap/spack.yaml")
        context = {
            "python_spec": spec_for_current_python(),
            "python_prefix": sys.exec_prefix,
            "architecture": archspec.cpu.host().family,
            "environment_path": self.environment_root(),
            "environment_specs": self.spack_dev_requirements(),
            "store_path": store_path(),
        }
        self.environment_root().mkdir(parents=True, exist_ok=True)
        self.spack_yaml().write_text(template.render(context), encoding="utf-8")
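
# Illustrative sketch of how environment_root() above derives a directory name
# that is unique per Python version, architecture and interpreter prefix (the
# architecture family is hard-coded here; the real code asks archspec):
import hashlib
import sys

python_part = "python{}.{}".format(*sys.version_info[:2])             # e.g. "python3.10"
interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
print(f"{python_part}-x86_64-{interpreter_part}")                     # e.g. "python3.10-x86_64-2d6fa"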

def isort_root_spec():
    """Return the root spec used to bootstrap isort"""
    return _root_spec("py-isort@4.3.5:")


def mypy_root_spec():
    """Return the root spec used to bootstrap mypy"""
    return _root_spec("py-mypy@0.900:")


def black_root_spec():
    """Return the root spec used to bootstrap black"""
    return _root_spec("py-black")


def flake8_root_spec():
    """Return the root spec used to bootstrap flake8"""
    return _root_spec("py-flake8")


def pytest_root_spec():
    """Return the root spec used to bootstrap pytest"""
    return _root_spec("py-pytest")


def ensure_environment_dependencies():
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
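
# Illustrative sketch of the PATH handling in update_syspath_and_environ()
# above: new directories are prepended so the bootstrapped tools win, while
# whatever was already on the variable is preserved (the view path below is
# hypothetical):
import os

def prepend_to_path_var(var, new_dirs):
    old = os.environ.get(var, "")
    parts = [str(d) for d in new_dirs] + ([old] if old else [])
    os.environ[var] = os.pathsep.join(parts)

prepend_to_path_var("PATH", ["/opt/bootstrap/view/bin"])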
169  lib/spack/spack/bootstrap/status.py  Normal file
@@ -0,0 +1,169 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Query the status of bootstrapping on this machine"""
import platform

import llnl.util.executable

from ._common import _executables_in_store, _python_import, _try_import_from_store
from .config import ensure_bootstrap_configuration
from .core import clingo_root_spec, patchelf_root_spec
from .environment import (
    BootstrapEnvironment,
    black_root_spec,
    flake8_root_spec,
    isort_root_spec,
    mypy_root_spec,
    pytest_root_spec,
)


def _required_system_executable(exes, msg):
    """Search for an executable in the system path only."""
    if isinstance(exes, str):
        exes = (exes,)
    if llnl.util.executable.which_string(*exes):
        return True, None
    return False, msg


def _required_executable(exes, query_spec, msg):
    """Search for an executable in the system path or in the bootstrap store."""
    if isinstance(exes, str):
        exes = (exes,)
    if llnl.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
        return True, None
    return False, msg


def _required_python_module(module, query_spec, msg):
    """Check if a Python module is available in the current interpreter or
    if it can be loaded from the bootstrap store
    """
    if _python_import(module) or _try_import_from_store(module, query_spec):
        return True, None
    return False, msg
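
# Illustrative sketch of how the (found, error_message) tuples returned by the
# three helpers above are consumed (status_message() below does essentially
# this while accumulating the messages into its report):
def check_requirements(checks):
    failures = [msg for ok, msg in (check() for check in checks) if not ok]
    return (not failures), failures

ok, failures = check_requirements([
    lambda: (True, None),
    lambda: (False, 'MISSING "isort": required for style checks'),
])
print(ok, failures)  # False ['MISSING "isort": required for style checks']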

def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
    if not system_only:
        return msg.format(name, purpose, "@*y{{B}}")
    return msg.format(name, purpose, "@*y{{-}}")


def _core_requirements():
    _core_system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "bash": _missing("bash", "required for Spack compiler wrapper"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
        "unzip": _missing("unzip", "required to compress/decompress code archives"),
        "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
        "git": _missing("git", "required to fetch/manage git repositories"),
    }
    if platform.system().lower() == "linux":
        _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()]
    # Python modules
    result.append(
        _required_python_module(
            "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False)
        )
    )
    return result


def _buildcache_requirements():
    _buildcache_exes = {
        "file": _missing("file", "required to analyze files for buildcaches"),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
        _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]

    if platform.system().lower() == "linux":
        result.append(
            _required_executable(
                "patchelf",
                patchelf_root_spec(),
                _missing("patchelf", "required to relocate binaries", False),
            )
        )

    return result


def _optional_requirements():
    _optional_exes = {
        "zstd": _missing("zstd", "required to compress/decompress code archives"),
        "svn": _missing("svn", "required to manage subversion repositories"),
        "hg": _missing("hg", "required to manage mercurial repositories"),
    }
    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _optional_exes.items()]
    return result


def _development_requirements():
    # Ensure we trigger environment modifications if we have an environment
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as env:
            env.update_syspath_and_environ()

    return [
        _required_executable(
            "isort", isort_root_spec(), _missing("isort", "required for style checks", False)
        ),
        _required_executable(
            "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False)
        ),
        _required_executable(
            "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False)
        ),
        _required_executable(
            "black", black_root_spec(), _missing("black", "required for code formatting", False)
        ),
        _required_python_module(
            "pytest", pytest_root_spec(), _missing("pytest", "required to run unit-tests", False)
        ),
    ]


def status_message(section):
    """Return a status message to be printed to screen that refers to the
    section passed as argument and a bool which is True if there are missing
    dependencies.

    Args:
        section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
    """
    pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}"

    # Contain the header of the section and a list of requirements
    spack_sections = {
        "core": ("{0} @*{{Core Functionalities}}", _core_requirements),
        "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements),
        "optional": ("{0} @*{{Optional Features}}", _optional_requirements),
        "develop": ("{0} @*{{Development Dependencies}}", _development_requirements),
    }
    msg, required_software = spack_sections[section]

    with ensure_bootstrap_configuration():
        missing_software = False
        for found, err_msg in required_software():
            if not found:
                missing_software = True
                msg += "\n  " + err_msg
        msg += "\n"
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software
@@ -33,25 +33,36 @@
calls you can make from within the install() function.
"""
import inspect
import io
import multiprocessing
import os
import re
import shutil
import sys
import traceback
import types

from six import StringIO
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.envmod import (
    EnvironmentModifications,
    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    system_dirs,
    validate,
)
from llnl.util.executable import Executable
from llnl.util.lang import dedupe
from llnl.util.string import plural
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.config
import spack.install_test
@@ -64,25 +75,12 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
import spack.util.pattern
from spack.error import NoHeadersError, NoLibrariesError
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.environment import (
    EnvironmentModifications,
    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    system_dirs,
    validate,
)
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, module, path_from_modules
from spack.util.string import plural

#
# This can be set by the user to globally disable parallel builds.
@@ -285,6 +283,23 @@ def clean_environment():
    return env


def _add_werror_handling(keep_werror, env):
    keep_flags = set()
    # set of pairs
    replace_flags: List[Tuple[str, str]] = []
    if keep_werror == "all":
        keep_flags.add("-Werror*")
    else:
        if keep_werror == "specific":
            keep_flags.add("-Werror-*")
            keep_flags.add("-Werror=*")
        # This extra case is to handle -Werror-implicit-function-declaration
        replace_flags.append(("-Werror-", "-Wno-error="))
        replace_flags.append(("-Werror", "-Wno-error"))
    env.set("SPACK_COMPILER_FLAGS_KEEP", "|".join(keep_flags))
    env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
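
# Illustrative sketch of what the two environment variables set above encode:
# keep patterns pass a flag through untouched, otherwise prefix replacements
# apply. Whether the compiler wrapper matches with fnmatch exactly is an
# assumption of this sketch.
import fnmatch

def rewrite_werror(flags, keep_patterns, replacements):
    result = []
    for flag in flags:
        if any(fnmatch.fnmatch(flag, pat) for pat in keep_patterns):
            result.append(flag)  # explicitly kept, e.g. keep_werror == "specific"
            continue
        for old, new in replacements:
            if flag.startswith(old):
                flag = new + flag[len(old):]
                break
        result.append(flag)
    return result

print(rewrite_werror(
    ["-O2", "-Werror", "-Werror=format"],
    keep_patterns={"-Werror=*"},
    replacements=[("-Werror-", "-Wno-error="), ("-Werror", "-Wno-error")],
))
# -> ['-O2', '-Wno-error', '-Werror=format']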
|
||||
|
||||
def set_compiler_environment_variables(pkg, env):
|
||||
assert pkg.spec.concrete
|
||||
compiler = pkg.compiler
|
||||
@@ -331,6 +346,13 @@ def set_compiler_environment_variables(pkg, env):
|
||||
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
|
||||
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
|
||||
|
||||
if pkg.keep_werror is not None:
|
||||
keep_werror = pkg.keep_werror
|
||||
else:
|
||||
keep_werror = spack.config.get("config:flags:keep_werror")
|
||||
|
||||
_add_werror_handling(keep_werror, env)
|
||||
|
||||
# Set the target parameters that the compiler will add
|
||||
# Don't set on cray platform because the targeting module handles this
|
||||
if spec.satisfies("platform=cray"):
|
||||
@@ -353,10 +375,8 @@ def set_compiler_environment_variables(pkg, env):
|
||||
if isinstance(pkg.flag_handler, types.FunctionType):
|
||||
handler = pkg.flag_handler
|
||||
else:
|
||||
if sys.version_info >= (3, 0):
|
||||
handler = pkg.flag_handler.__func__
|
||||
else:
|
||||
handler = pkg.flag_handler.im_func
|
||||
handler = pkg.flag_handler.__func__
|
||||
|
||||
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
|
||||
inject_flags[flag] = injf or []
|
||||
env_flags[flag] = envf or []
|
||||
@@ -542,14 +562,18 @@ def determine_number_of_jobs(
|
||||
return min(max_cpus, config_default)
|
||||
|
||||
|
||||
def _set_variables_for_single_module(pkg, module):
|
||||
"""Helper function to set module variables for single module."""
|
||||
def set_module_variables_for_package(pkg):
|
||||
"""Populate the Python module of a package with some useful global names.
|
||||
This makes things easier for package writers.
|
||||
"""
|
||||
# Put a marker on this module so that it won't execute the body of this
|
||||
# function again, since it is not needed
|
||||
marker = "_set_run_already_called"
|
||||
if getattr(module, marker, False):
|
||||
if getattr(pkg.module, marker, False):
|
||||
return
|
||||
|
||||
module = ModuleChangePropagator(pkg)
|
||||
|
||||
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
||||
|
||||
m = module
|
||||
@@ -560,15 +584,13 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.gmake = MakeExecutable("gmake", jobs)
|
||||
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
|
||||
|
||||
# easy shortcut to os.environ
|
||||
m.env = os.environ
|
||||
|
||||
# Find the configure script in the archive path
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable("./configure")
|
||||
|
||||
if sys.platform == "win32":
|
||||
m.nmake = Executable("nmake")
|
||||
m.msbuild = Executable("msbuild")
|
||||
# Standard CMake arguments
|
||||
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
@@ -581,21 +603,6 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
|
||||
m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
|
||||
|
||||
# Emulate some shell commands for convenience
|
||||
m.pwd = os.getcwd
|
||||
m.cd = os.chdir
|
||||
m.mkdir = os.mkdir
|
||||
m.makedirs = os.makedirs
|
||||
m.remove = os.remove
|
||||
m.removedirs = os.removedirs
|
||||
m.symlink = symlink
|
||||
|
||||
m.mkdirp = mkdirp
|
||||
m.install = install
|
||||
m.install_tree = install_tree
|
||||
m.rmtree = shutil.rmtree
|
||||
m.move = shutil.move
|
||||
|
||||
# Useful directories within the prefix are encapsulated in
|
||||
# a Prefix object.
|
||||
m.prefix = pkg.prefix
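
As a hedged illustration of what these module globals give a package author (a hypothetical package's install(), assuming only the names set on m above):

    def install(self, spec, prefix):
        configure("--prefix=" + prefix)  # the ./configure wrapper set above
        make()                           # MakeExecutable honoring the computed job count
        make("install")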
@@ -616,20 +623,7 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
    # Put a marker on this module so that it won't execute the body of this
    # function again, since it is not needed
    setattr(m, marker, True)

-
-def set_module_variables_for_package(pkg):
-    """Populate the module scope of install() with some useful functions.
-
-    This makes things easier for package writers.
-    """
-    # If a user makes their own package repo, e.g.
-    # spack.pkg.mystuff.libelf.Libelf, and they inherit from an existing class
-    # like spack.pkg.original.libelf.Libelf, then set the module variables
-    # for both classes so the parent class can still use them if it gets
-    # called. parent_class_modules includes pkg.module.
-    modules = parent_class_modules(pkg.__class__)
-    for mod in modules:
-        _set_variables_for_single_module(pkg, mod)
+    module.propagate_changes_to_mro()


def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwargs):
@@ -739,25 +733,6 @@ def get_rpaths(pkg):
    return list(dedupe(filter_system_paths(rpaths)))


-def parent_class_modules(cls):
-    """
-    Get list of superclass modules that descend from spack.package_base.PackageBase
-
-    Includes cls.__module__
-    """
-    if not issubclass(cls, spack.package_base.PackageBase) or issubclass(
-        spack.package_base.PackageBase, cls
-    ):
-        return []
-    result = []
-    module = sys.modules.get(cls.__module__)
-    if module:
-        result = [module]
-    for c in cls.__bases__:
-        result.extend(parent_class_modules(c))
-    return result
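
The removed helper collected, for each superclass in a package's MRO, the module that defines it. A self-contained toy showing the same walk with plain classes (not Spack packages):

    import sys

    class Base:            # stands in for spack.package_base.PackageBase
        pass

    class MyPackage(Base):
        pass

    mods = []
    for cls in MyPackage.__mro__:
        mod = sys.modules.get(cls.__module__)
        if mod is not None and mod not in mods:
            mods.append(mod)
    print([m.__name__ for m in mods])  # ['__main__', 'builtins'] when run as a script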


def load_external_modules(pkg):
    """Traverse a package's spec DAG and load any external modules.

@@ -978,22 +953,9 @@ def add_modifications_for_dep(dep):
        if set_package_py_globals:
            set_module_variables_for_package(dpkg)

-        # Allow dependencies to modify the module
-        # Get list of modules that may need updating
-        modules = []
-        for cls in inspect.getmro(type(spec.package)):
-            module = cls.module
-            if module == spack.package_base:
-                break
-            modules.append(module)
-
-        # Execute changes as if on a single module
-        # copy dict to ensure prior changes are available
-        changes = spack.util.pattern.Bunch()
-        dpkg.setup_dependent_package(changes, spec)
-
-        for module in modules:
-            module.__dict__.update(changes.__dict__)
+        current_module = ModuleChangePropagator(spec.package)
+        dpkg.setup_dependent_package(current_module, spec)
+        current_module.propagate_changes_to_mro()

        if context == "build":
            builder = spack.builder.create(dpkg)
@@ -1271,6 +1233,8 @@ def make_stack(tb, stack=None):
            obj = frame.f_locals["self"]
            if isinstance(obj, spack.package_base.PackageBase):
                break
+    else:
+        return None

    # We found obj, the Package implementation we care about.
    # Point out the location in the install method where we failed.
@@ -1339,7 +1303,7 @@ class ChildError(InstallError):

    # List of errors considered "build errors", for which we'll show log
    # context instead of Python context.
-    build_errors = [("spack.util.executable", "ProcessError")]
+    build_errors = [("llnl.util.executable", "ProcessError")]

    def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
        super(ChildError, self).__init__(msg)
@@ -1352,7 +1316,7 @@ def __init__(self, msg, module, classname, traceback_string, log_name, log_type,

    @property
    def long_message(self):
-        out = StringIO()
+        out = io.StringIO()
        out.write(self._long_message if self._long_message else "")

        have_log = self.log_name and os.path.exists(self.log_name)
@@ -1437,3 +1401,51 @@ def write_log_summary(out, log_type, log, last=None):
        # If no errors are found but warnings are, display warnings
        out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
        out.write(make_log_context(warnings))


+class ModuleChangePropagator:
+    """Wrapper class to accept changes to a package.py Python module, and propagate them in the
+    MRO of the package.
+
+    It is mainly used as a substitute of the ``package.py`` module, when calling the
+    "setup_dependent_package" function during build environment setup.
+    """
+
+    _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")
+
+    def __init__(self, package):
+        self._set_self_attributes("package", package)
+        self._set_self_attributes("current_module", package.module)
+
+        #: Modules for the classes in the MRO up to PackageBase
+        modules_in_mro = []
+        for cls in inspect.getmro(type(package)):
+            module = cls.module
+
+            if module == self.current_module:
+                continue
+
+            if module == spack.package_base:
+                break
+
+            modules_in_mro.append(module)
+        self._set_self_attributes("modules_in_mro", modules_in_mro)
+        self._set_self_attributes("_set_attributes", {})
+
+    def _set_self_attributes(self, key, value):
+        super().__setattr__(key, value)
+
+    def __getattr__(self, item):
+        return getattr(self.current_module, item)

+    def __setattr__(self, key, value):
+        if key in ModuleChangePropagator._PROTECTED_NAMES:
+            msg = f'Cannot set attribute "{key}" in ModuleChangePropagator'
+            raise AttributeError(msg)

+        setattr(self.current_module, key, value)
+        self._set_attributes[key] = value
+
+    def propagate_changes_to_mro(self):
+        for module_in_mro in self.modules_in_mro:
+            module_in_mro.__dict__.update(self._set_attributes)
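
A hedged usage sketch of the class above, assuming a concrete package instance pkg (the attribute name below is arbitrary):

    module = ModuleChangePropagator(pkg)
    module.some_global = 42            # forwarded to pkg.module and recorded
    module.propagate_changes_to_mro()  # replays recorded changes onto every MRO module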


@@ -3,30 +3,30 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

-import six
+from typing import List

import llnl.util.lang

import spack.builder
import spack.installer
import spack.relocate
import spack.spec
import spack.store


-def sanity_check_prefix(builder):
+def sanity_check_prefix(builder: spack.builder.Builder):
    """Check that specific directories and files are created after installation.

    The files to be checked are in the ``sanity_check_is_file`` attribute of the
    package object, while the directories are in the ``sanity_check_is_dir``.

    Args:
-        builder (spack.builder.Builder): builder that installed the package
+        builder: builder that installed the package
    """
    pkg = builder.pkg

    def check_paths(path_list, filetype, predicate):
-        if isinstance(path_list, six.string_types):
+        if isinstance(path_list, str):
            path_list = [path_list]

        for path in path_list:
@@ -45,7 +45,7 @@ def check_paths(path_list, filetype, predicate):
    raise spack.installer.InstallError(msg.format(pkg.name))
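
For context, a hedged example of the package attributes this check consumes (hypothetical package; paths are relative to the install prefix):

    class MyPkg(Package):
        # sanity_check_prefix verifies these exist after installation:
        sanity_check_is_file = ["bin/mytool"]
        sanity_check_is_dir = ["include", "lib"]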


-def apply_macos_rpath_fixups(builder):
+def apply_macos_rpath_fixups(builder: spack.builder.Builder):
    """On Darwin, make installed libraries more easily relocatable.

    Some build systems (handrolled, autotools, makefiles) can set their own
@@ -57,20 +57,22 @@ def apply_macos_rpath_fixups(builder):
    packages) that do not install relocatable libraries by default.

    Args:
-        builder (spack.builder.Builder): builder that installed the package
+        builder: builder that installed the package
    """
    spack.relocate.fixup_macos_rpaths(builder.spec)


-def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
+def ensure_build_dependencies_or_raise(
+    spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
+):
    """Ensure that some build dependencies are present in the concrete spec.

    If not, raise a RuntimeError with a helpful error message.

    Args:
-        spec (spack.spec.Spec): concrete spec to be checked.
-        dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied
-        error_msg (str): brief error message to be prepended to a longer description
+        spec: concrete spec to be checked.
+        dependencies: list of abstract specs to be satisfied
+        error_msg: brief error message to be prepended to a longer description

    Raises:
        RuntimeError: when the required build dependencies are not found
@@ -85,33 +87,35 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
    # Raise an exception on missing deps.
    msg = (
        "{0}: missing dependencies: {1}.\n\nPlease add "
-        "the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps))
+        "the following lines to the package:\n\n".format(
+            error_msg, ", ".join(str(d) for d in missing_deps)
+        )
    )

    for dep in missing_deps:
-        msg += " depends_on('{0}', type='build', when='@{1} {2}')\n".format(
+        msg += ' depends_on("{0}", type="build", when="@{1} {2}")\n'.format(
            dep, spec.version, "build_system=autotools"
        )

-    msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
+    msg += '\nUpdate the version (when="@{0}") as needed.'.format(spec.version)
    raise RuntimeError(msg)
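
Reproducing the message formatting above with hypothetical values, to show the line a packager would be asked to add:

    dep, version = "automake", "1.2.3"
    line = ' depends_on("{0}", type="build", when="@{1} {2}")\n'.format(
        dep, version, "build_system=autotools"
    )
    print(line)
    # -> depends_on("automake", type="build", when="@1.2.3 build_system=autotools")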


-def execute_build_time_tests(builder):
+def execute_build_time_tests(builder: spack.builder.Builder):
    """Execute the build-time tests prescribed by builder.

    Args:
-        builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
+        builder: builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``build_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build")


-def execute_install_time_tests(builder):
+def execute_install_time_tests(builder: spack.builder.Builder):
    """Execute the install-time tests prescribed by builder.

    Args:
-        builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
+        builder: builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``install_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install")

@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.executable
import llnl.util.filesystem as fs

import spack.directives
import spack.package_base
-import spack.util.executable

from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -22,7 +22,7 @@ def configure(self, pkg, spec, prefix):
        prezip = spec["aspell"].prefix.bin.prezip
        destdir = prefix

-        sh = spack.util.executable.which("sh")
+        sh = llnl.util.executable.which("sh")
        sh(
            "./configure",
            "--vars",

@@ -7,10 +7,11 @@
import os.path
import stat
import subprocess
-from typing import List  # novm
+from typing import List

import llnl.util.filesystem as fs
import llnl.util.tty as tty
+from llnl.util.executable import Executable

import spack.build_environment
import spack.builder
@@ -18,7 +19,6 @@
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version
-from spack.util.executable import Executable
from spack.version import Version

from ._checks import (
@@ -138,7 +138,7 @@ class AutotoolsBuilder(BaseBuilder):
    patch_libtool = True

    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
    install_targets = ["install"]

@@ -152,7 +152,7 @@ class AutotoolsBuilder(BaseBuilder):
    force_autoreconf = False

    #: Options to be passed to autoreconf when using the default implementation
-    autoreconf_extra_args = []  # type: List[str]
+    autoreconf_extra_args: List[str] = []

    #: If False deletes all the .la files in the prefix folder after the installation.
    #: If True instead it installs them.

@@ -34,22 +34,22 @@ class CachedCMakeBuilder(CMakeBuilder):

    #: Phases of a Cached CMake package
    #: Note: the initconfig phase is used for developer builds as a final phase to stop on
-    phases = ("initconfig", "cmake", "build", "install")  # type: Tuple[str, ...]
+    phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")

    #: Names associated with package methods in the old build-system format
-    legacy_methods = CMakeBuilder.legacy_methods + (
+    legacy_methods: Tuple[str, ...] = CMakeBuilder.legacy_methods + (
        "initconfig_compiler_entries",
        "initconfig_mpi_entries",
        "initconfig_hardware_entries",
        "std_initconfig_entries",
        "initconfig_package_entries",
-    )  # type: Tuple[str, ...]
+    )

    #: Names associated with package attributes in the old build-system format
-    legacy_attributes = CMakeBuilder.legacy_attributes + (
+    legacy_attributes: Tuple[str, ...] = CMakeBuilder.legacy_attributes + (
        "cache_name",
        "cache_path",
-    )  # type: Tuple[str, ...]
+    )

    @property
    def cache_name(self):
@@ -205,13 +205,7 @@ def initconfig_hardware_entries(self):
            entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
            cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))

-            if spec.satisfies("^mpi"):
-                entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${MPI_CXX_COMPILER}"))
-            else:
-                entries.append(
-                    cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}")
-                )
+            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))

        return entries


@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections.abc
import inspect
import os
import platform
@@ -9,15 +10,11 @@
import sys
from typing import List, Tuple

-import six
-
import llnl.util.filesystem as fs
-from llnl.util.compat import Sequence

import spack.build_environment
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, depends_on, variant
from spack.multimethod import when

@@ -155,13 +152,13 @@ class CMakeBuilder(BaseBuilder):
    """

    #: Phases of a CMake package
-    phases = ("cmake", "build", "install")  # type: Tuple[str, ...]
+    phases: Tuple[str, ...] = ("cmake", "build", "install")

    #: Names associated with package methods in the old build-system format
-    legacy_methods = ("cmake_args", "check")  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = ("cmake_args", "check")

    #: Names associated with package attributes in the old build-system format
-    legacy_attributes = (
+    legacy_attributes: Tuple[str, ...] = (
        "generator",
        "build_targets",
        "install_targets",
@@ -171,7 +168,7 @@ class CMakeBuilder(BaseBuilder):
        "std_cmake_args",
        "build_dirname",
        "build_directory",
-    )  # type: Tuple[str, ...]
+    )

    #: The build system generator to use.
    #:
@@ -184,7 +181,7 @@ class CMakeBuilder(BaseBuilder):
    generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"

    #: Targets to be used during the build phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
    #: Targets to be used during the install phase
    install_targets = ["install"]
    #: Callback names for build-time test

@@ -302,7 +299,7 @@ def define(cmake_var, value):
            value = "ON" if value else "OFF"
        else:
            kind = "STRING"
-            if isinstance(value, Sequence) and not isinstance(value, six.string_types):
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)
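
Illustrative inputs and outputs for define under the updated check, assuming the full define helper above; expected strings are inferred from its BOOL/STRING logic:

    define("ENABLE_TESTS", True)           # -> "-DENABLE_TESTS:BOOL=ON"
    define("CMAKE_BUILD_TYPE", "Release")  # -> "-DCMAKE_BUILD_TYPE:STRING=Release"
    define("EXTRA_FLAGS", ["-O2", "-g"])   # -> "-DEXTRA_FLAGS:STRING=-O2;-g"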

@@ -35,10 +35,10 @@ class GenericBuilder(BaseBuilder):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
-    legacy_methods = ()  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = ()

    #: Names associated with package attributes in the old build-system format
-    legacy_attributes = ("archive_files",)  # type: Tuple[str, ...]
+    legacy_attributes: Tuple[str, ...] = ("archive_files",)

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

@@ -13,7 +13,7 @@ class GNUMirrorPackage(spack.package_base.PackageBase):
    """Mixin that takes care of setting url and mirrors for GNU packages."""

    #: Path of the package in a GNU mirror
-    gnu_mirror_path = None  # type: Optional[str]
+    gnu_mirror_path: Optional[str] = None

    #: List of GNU mirrors used by Spack
    base_mirrors = [

@@ -11,6 +11,8 @@
import xml.etree.ElementTree as ElementTree

import llnl.util.tty as tty
+from llnl.util.envmod import EnvironmentModifications
+from llnl.util.executable import Executable
from llnl.util.filesystem import (
    HeaderList,
    LibraryList,
@@ -25,8 +27,6 @@
import spack.error
from spack.build_environment import dso_suffix
from spack.package_base import InstallError
-from spack.util.environment import EnvironmentModifications
-from spack.util.executable import Executable
from spack.util.prefix import Prefix
from spack.version import Version, ver


@@ -4,11 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

+import llnl.util.executable
from llnl.util.filesystem import find

import spack.builder
import spack.package_base
-import spack.util.executable
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when

@@ -41,11 +41,11 @@ class LuaPackage(spack.package_base.PackageBase):

    @property
    def lua(self):
-        return spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua)
+        return llnl.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua)

    @property
    def luarocks(self):
-        lr = spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks)
+        lr = llnl.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks)
        return lr


@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
-from typing import List  # novm
+from typing import List

import llnl.util.filesystem as fs

@@ -77,7 +77,7 @@ class MakefileBuilder(BaseBuilder):
    )

    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
    install_targets = ["install"]


@@ -3,12 +3,12 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.filesystem as fs
+from llnl.util.executable import which

import spack.builder
import spack.package_base
from spack.directives import build_system, depends_on
from spack.multimethod import when
-from spack.util.executable import which

from ._checks import BaseBuilder


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
-from typing import List  # novm
+from typing import List

import llnl.util.filesystem as fs

@@ -95,7 +95,7 @@ class MesonBuilder(BaseBuilder):
        "build_directory",
    )

-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
    install_targets = ["install"]

    build_time_test_callbacks = ["check"]

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
-from typing import List  # novm
+from typing import List

import llnl.util.filesystem as fs

@@ -72,7 +72,7 @@ class NMakeBuilder(BaseBuilder):
    )

    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
-    build_targets = []  # type: List[str]
+    build_targets: List[str] = []
    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
    install_targets = ["install"]


@@ -8,10 +8,11 @@
import shutil
from os.path import basename, dirname, isdir

+from llnl.util.envmod import EnvironmentModifications
+from llnl.util.executable import Executable
from llnl.util.filesystem import find_headers, find_libraries, join_path

-from spack.util.environment import EnvironmentModifications
-from spack.util.executable import Executable
+from spack.directives import conflicts, variant

from .generic import Package

@@ -25,6 +26,23 @@ class IntelOneApiPackage(Package):
    # organization (e.g. University/Company).
    redistribute_source = False

+    for c in [
+        "target=ppc64:",
+        "target=ppc64le:",
+        "target=aarch64:",
+        "platform=darwin:",
+        "platform=cray:",
+        "platform=windows:",
+    ]:
+        conflicts(c, msg="This package is only available for x86_64 and Linux")
+
+    # Add variant to toggle environment modifications from vars.sh
+    variant(
+        "envmods",
+        default=True,
+        description="Toggles environment modifications",
+    )

    @staticmethod
    def update_description(cls):
        """Updates oneapi package descriptions with common text."""
@@ -103,11 +121,13 @@ def setup_run_environment(self, env):

            $ source {prefix}/{component}/{version}/env/vars.sh
        """
-        env.extend(
-            EnvironmentModifications.from_sourcing_file(
-                join_path(self.component_prefix, "env", "vars.sh")
+        # Only if environment modifications are desired (default is +envmods)
+        if "+envmods" in self.spec:
+            env.extend(
+                EnvironmentModifications.from_sourcing_file(
+                    join_path(self.component_prefix, "env", "vars.sh")
+                )
            )
-        )


class IntelOneApiLibraryPackage(IntelOneApiPackage):

@@ -5,13 +5,13 @@
import inspect
import os

+from llnl.util.executable import Executable
from llnl.util.filesystem import filter_file

import spack.builder
import spack.package_base
from spack.directives import build_system, extends
from spack.package_base import PackageBase
-from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_build_time_tests


@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import glob
import inspect
import os
import re
@@ -16,6 +15,7 @@
import spack.builder
import spack.multimethod
import spack.package_base
+import spack.spec
from spack.directives import build_system, depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.version import Version
@@ -177,7 +177,7 @@ class PythonPackage(PythonExtension):
    """Specialized class for packages that are built using pip."""

    #: Package name, version, and extension on PyPI
-    pypi = None  # type: Optional[str]
+    pypi: Optional[str] = None

    maintainers = ["adamjstewart", "pradyunsg"]

@@ -200,7 +200,7 @@ class PythonPackage(PythonExtension):
    # package manually
    depends_on("py-wheel", type="build")

-    py_namespace = None  # type: Optional[str]
+    py_namespace: Optional[str] = None

    @lang.classproperty
    def homepage(cls):
@@ -219,13 +219,34 @@ def list_url(cls):
            name = cls.pypi.split("/")[0]
            return "https://pypi.org/simple/" + name + "/"

+    def update_external_dependencies(self):
+        """
+        Ensure all external python packages have a python dependency
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = spack.spec.Spec("python")
+                repo = spack.repo.path.repo_for_pkg(python)
+                python.namespace = repo.namespace
+            python._mark_concrete()
+            python.external_path = self.prefix
+            self.spec.add_dependency_edge(python, ("build", "link", "run"))
+
    @property
    def headers(self):
        """Discover header files in platlib."""

        # Headers may be in either location
-        include = self.prefix.join(self.include)
-        platlib = self.prefix.join(self.platlib)
+        include = self.prefix.join(self.spec["python"].package.include)
+        platlib = self.prefix.join(self.spec["python"].package.platlib)
        headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)

        if headers:
@@ -234,29 +255,13 @@ def headers(self):
        msg = "Unable to locate {} headers in {} or {}"
        raise NoHeadersError(msg.format(self.spec.name, include, platlib))

-    @property
-    def include(self):
-        include = glob.glob(self.prefix.include.join("python*"))
-        if include:
-            return include[0]
-        return self.spec["python"].package.include
-
-    @property
-    def platlib(self):
-        for libname in ("lib", "lib64"):
-            platlib = glob.glob(self.prefix.join(libname).join("python*").join("site-packages"))
-            if platlib:
-                return platlib[0]
-
-        return self.spec["python"].package.platlib
-
    @property
    def libs(self):
        """Discover libraries in platlib."""

        # Remove py- prefix in package name
        library = "lib" + self.spec.name[3:].replace("-", "?")
-        root = self.prefix.join(self.platlib)
+        root = self.prefix.join(self.spec["python"].package.platlib)

        for shared in [True, False]:
            libs = fs.find_libraries(library, root, shared=shared, recursive=True)

@@ -81,6 +81,6 @@ def install(self, pkg, spec, prefix):
    def check(self):
"""Search the Makefile for a ``check:`` target and runs it if found."""
        with working_dir(self.build_directory):
-            self._if_make_target_execute("check")
+            self.pkg._if_make_target_execute("check")

    spack.builder.run_after("build")(execute_build_time_tests)

@@ -22,10 +22,10 @@ class RBuilder(GenericBuilder):
    """

    #: Names associated with package methods in the old build-system format
-    legacy_methods = (
+    legacy_methods: Tuple[str, ...] = (
        "configure_args",
        "configure_vars",
-    ) + GenericBuilder.legacy_methods  # type: Tuple[str, ...]
+    ) + GenericBuilder.legacy_methods

    def configure_args(self):
        """Arguments to pass to install via ``--configure-args``."""
@@ -64,10 +64,10 @@ class RPackage(Package):
    # package attributes that can be expanded to set the homepage, url,
    # list_url, and git values
    # For CRAN packages
-    cran = None  # type: Optional[str]
+    cran: Optional[str] = None

    # For Bioconductor packages
-    bioc = None  # type: Optional[str]
+    bioc: Optional[str] = None

    GenericBuilder = RBuilder


@@ -8,13 +8,13 @@
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
+from llnl.util.envmod import env_flag
+from llnl.util.executable import Executable, ProcessError

import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import build_system, extends
from spack.package_base import PackageBase
-from spack.util.environment import env_flag
-from spack.util.executable import Executable, ProcessError


class RacketPackage(PackageBase):
@@ -34,7 +34,7 @@ class RacketPackage(PackageBase):

    extends("racket", when="build_system=racket")

-    racket_name = None  # type: Optional[str]
+    racket_name: Optional[str] = None
    parallel = True

    @lang.classproperty
@@ -51,7 +51,7 @@ class RacketBuilder(spack.builder.Builder):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
-    legacy_methods = tuple()  # type: Tuple[str, ...]
+    legacy_methods: Tuple[str, ...] = tuple()

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
@@ -59,7 +59,7 @@ class RacketBuilder(spack.builder.Builder):
    #: Callback names for build-time test
    build_time_test_callbacks = ["check"]

-    racket_name = None  # type: Optional[str]
+    racket_name: Optional[str] = None

    @property
    def subdirectory(self):

@@ -96,18 +96,33 @@ class ROCmPackage(PackageBase):
        "gfx803",
        "gfx900",
+        "gfx900:xnack-",
        "gfx902",
        "gfx904",
        "gfx906",
-        "gfx908",
-        "gfx90a",
+        "gfx906:xnack-",
+        "gfx908",
+        "gfx908:xnack-",
+        "gfx909",
+        "gfx90a",
+        "gfx90a:xnack-",
+        "gfx90a:xnack+",
+        "gfx90c",
+        "gfx940",
        "gfx1010",
        "gfx1011",
        "gfx1012",
+        "gfx1013",
        "gfx1030",
        "gfx1031",
+        "gfx1032",
+        "gfx1033",
+        "gfx1034",
+        "gfx1035",
+        "gfx1036",
+        "gfx1100",
+        "gfx1101",
+        "gfx1102",
+        "gfx1103",
    )

    variant("rocm", default=False, description="Enable ROCm support")
@@ -117,6 +132,7 @@ class ROCmPackage(PackageBase):
        "amdgpu_target",
        description="AMD GPU architecture",
        values=spack.variant.any_combination_of(*amdgpu_targets),
+        sticky=True,
        when="+rocm",
    )

@@ -144,6 +160,29 @@ def hip_flags(amdgpu_target):
    # depends_on('hip@:6.0', when='amdgpu_target=gfx701')
    # to indicate minimum version for each architecture.

+    # Add compiler minimum versions based on the first release where the
+    # processor is included in llvm/lib/Support/TargetParser.cpp
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx900:xnack-")
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx906:xnack-")
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx908:xnack-")
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx90c")
+    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a")
+    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
+    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
+    depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
+    depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
+    depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
+    depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1032")
+    depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1033")
+    depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx1034")
+    depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1035")
+    depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
+    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
+    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")
+    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1102")
+    depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1103")
+
    # Compiler conflicts

    # TODO: add conflicts statements along the lines of
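
A hedged sketch of a package consuming these targets (hypothetical class and CMake variable; amdgpu_target is multi-valued, so its value is a tuple of target names):

    class MyRocmApp(ROCmPackage):
        def cmake_args(self):
            args = []
            if self.spec.satisfies("+rocm"):
                targets = self.spec.variants["amdgpu_target"].value
                args.append("-DGPU_TARGETS=" + ";".join(targets))
            return args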

@@ -46,10 +46,10 @@ class SConsBuilder(BaseBuilder):
    phases = ("build", "install")

    #: Names associated with package methods in the old build-system format
-    legacy_methods = ("install_args", "build_test")
+    legacy_methods = ("build_test",)

    #: Same as legacy_methods, but the signature is different
-    legacy_long_methods = ("build_args",)
+    legacy_long_methods = ("build_args", "install_args")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ("build_time_test_callbacks",)
@@ -66,13 +66,13 @@ def build(self, pkg, spec, prefix):
        args = self.build_args(spec, prefix)
        inspect.getmodule(self.pkg).scons(*args)

-    def install_args(self):
+    def install_args(self, spec, prefix):
        """Arguments to pass to install."""
        return []

    def install(self, pkg, spec, prefix):
        """Install the package."""
-        args = self.install_args()
+        args = self.install_args(spec, prefix)

        inspect.getmodule(self.pkg).scons("install", *args)
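
Since install_args is now a "long" method, overrides take (spec, prefix) explicitly; a hedged sketch with a hypothetical package:

    class MyScons(SConsPackage):
        def install_args(self, spec, prefix):
            # prefix is passed in rather than read off self
            return ["PREFIX={0}".format(prefix)]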


@@ -157,7 +157,7 @@ def configure(self, pkg, spec, prefix):
            ]
        )

-        self.python(configure, *args)
+        self.pkg.python(configure, *args)

    def configure_args(self):
        """Arguments to pass to configure."""

@@ -14,7 +14,7 @@ class SourceforgePackage(spack.package_base.PackageBase):
    packages."""

    #: Path of the package in a Sourceforge mirror
-    sourceforge_mirror_path = None  # type: Optional[str]
+    sourceforge_mirror_path: Optional[str] = None

    #: List of Sourceforge mirrors used by Spack
    base_mirrors = [
Some files were not shown because too many files have changed in this diff.