Compare commits
1079 commits: v0.17.2.1- ... cws/simmod
The comparison spans 1079 commits, from a9fbc0175d (oldest) to 44309e3b0b (newest); the per-file diffs follow.
.github/workflows/bootstrap.yml: 142 changes (vendored)

@@ -12,6 +12,7 @@ on:
       # built-in repository or documentation
       - 'var/spack/repos/builtin/**'
       - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
+      - '!var/spack/repos/builtin/packages/clingo/**'
       - '!var/spack/repos/builtin/packages/python/**'
       - '!var/spack/repos/builtin/packages/re2c/**'
       - 'lib/spack/docs/**'
@@ -19,11 +20,16 @@ on:
     # nightly at 2:16 AM
     - cron: '16 2 * * *'
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
 
   fedora-clingo-sources:
     runs-on: ubuntu-latest
     container: "fedora:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -31,14 +37,20 @@ jobs:
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - name: Setup repo and non-root user
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
         run: |
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
-          useradd spack-test
-          chown -R spack-test .
       - name: Bootstrap clingo
         shell: runuser -u spack-test -- bash {0}
         run: |
@@ -51,6 +63,7 @@ jobs:
   ubuntu-clingo-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -61,22 +74,20 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
-      - name: Work around CVE-2022-24765
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup non-root user
         run: |
-          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
-          # a breaking behavior. See:
-          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-          # - https://github.com/actions/checkout/issues/760
-          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          # See [1] below
           git config --global --add safe.directory /__w/spack/spack
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - name: Setup repo and non-root user
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
         run: |
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
-          useradd -m spack-test
-          chown -R spack-test .
       - name: Bootstrap clingo
         shell: runuser -u spack-test -- bash {0}
         run: |
@@ -89,6 +100,7 @@ jobs:
   ubuntu-clingo-binaries-and-patchelf:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -98,22 +110,20 @@ jobs:
         apt-get install -y \
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
-      - name: Work around CVE-2022-24765
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup non-root user
         run: |
-          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
-          # a breaking behavior. See:
-          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-          # - https://github.com/actions/checkout/issues/760
-          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          # See [1] below
           git config --global --add safe.directory /__w/spack/spack
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - name: Setup repo and non-root user
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
         run: |
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
-          useradd -m spack-test
-          chown -R spack-test .
       - name: Bootstrap clingo
         shell: runuser -u spack-test -- bash {0}
         run: |
@@ -121,10 +131,10 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
 
-
   opensuse-clingo-sources:
     runs-on: ubuntu-latest
     container: "opensuse/leap:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -134,9 +144,12 @@ jobs:
           bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - name: Setup repo and non-root user
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup repo
         run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
@@ -150,11 +163,13 @@ jobs:
 
   macos-clingo-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install cmake bison@2.7 tree
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -165,16 +180,19 @@ jobs:
           tree ~/.spack/bootstrap/store/
 
   macos-clingo-binaries:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install tree
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
         with:
           python-version: ${{ matrix.python-version }}
       - name: Bootstrap clingo
@@ -189,12 +207,14 @@ jobs:
     strategy:
       matrix:
         python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+    if: github.repository == 'spack/spack'
     steps:
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Setup repo and non-root user
+      - name: Setup repo
         run: |
           git --version
           git fetch --unshallow
@@ -209,6 +229,7 @@ jobs:
   ubuntu-gnupg-binaries:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -218,22 +239,20 @@ jobs:
         apt-get install -y \
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
-      - name: Work around CVE-2022-24765
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup non-root user
         run: |
-          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
-          # a breaking behavior. See:
-          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-          # - https://github.com/actions/checkout/issues/760
-          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          # See [1] below
          git config --global --add safe.directory /__w/spack/spack
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
-      - name: Setup repo and non-root user
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
         run: |
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
-          useradd -m spack-test
-          chown -R spack-test .
       - name: Bootstrap GnuPG
         shell: runuser -u spack-test -- bash {0}
         run: |
@@ -245,6 +264,7 @@ jobs:
   ubuntu-gnupg-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -255,22 +275,20 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
-      - name: Work around CVE-2022-24765
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
+      - name: Setup non-root user
         run: |
-          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
-          # a breaking behavior. See:
-          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-          # - https://github.com/actions/checkout/issues/760
-          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          # See [1] below
           git config --global --add safe.directory /__w/spack/spack
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
-      - name: Setup repo and non-root user
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
         run: |
           git --version
           git fetch --unshallow
           . .github/workflows/setup_git.sh
-          useradd -m spack-test
-          chown -R spack-test .
       - name: Bootstrap GnuPG
         shell: runuser -u spack-test -- bash {0}
         run: |
@@ -282,13 +300,15 @@ jobs:
 
   macos-gnupg-binaries:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install tree
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -298,13 +318,15 @@ jobs:
 
   macos-gnupg-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install gawk tree
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
+      - name: Checkout
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
@@ -312,3 +334,11 @@ jobs:
           spack bootstrap untrust github-actions-v0.2
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
+
+
+# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
+# introduce breaking behavior so we have to set `safe.directory` in gitconfig ourselves.
+# See:
+# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+# - https://github.com/actions/checkout/issues/760
+# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
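Across the Linux jobs above, the old combined "Setup repo and non-root user" step is split into three: a pinned checkout, a root-run step that marks the workspace as safe and creates an unprivileged user, and a repo-setup step that runs as that user through a custom `shell` template. A minimal sketch of the pattern follows; the job name, the `@v3` tag, and the use of `$GITHUB_WORKSPACE` in place of the hard-coded `/__w/spack/spack` path are illustrative assumptions, not taken from the diff:

```yaml
jobs:
  bootstrap-example:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - uses: actions/checkout@v3  # illustrative tag; the workflows above pin a full SHA
      - name: Setup non-root user
        run: |
          # Patched git refuses to operate on a repository owned by another
          # user unless it is listed as safe (CVE-2022-24765 mitigation).
          git config --global --add safe.directory "$GITHUB_WORKSPACE"
          useradd spack-test && mkdir -p ~spack-test
          chown -R spack-test . ~spack-test
      - name: Run as the unprivileged user
        # {0} is replaced by the path of the generated step script, so every
        # line under run: executes as spack-test rather than root.
        shell: runuser -u spack-test -- bash {0}
        run: |
          whoami  # prints spack-test
```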
.github/workflows/build-containers.yml: 21 changes (vendored)

@@ -19,6 +19,10 @@ on:
   release:
     types: [published]
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
   deploy-images:
     runs-on: ubuntu-latest
@@ -43,9 +47,10 @@ jobs:
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
     name: Build ${{ matrix.dockerfile[0] }}
+    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
+        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
 
       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -75,33 +80,33 @@ jobs:
           fi
 
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
+        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
         with:
           name: dockerfiles
           path: dockerfiles
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
+        uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
+        uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
 
       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
+        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Log in to DockerHub
-        if: ${{ github.event_name != 'pull_request' }}
-        uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
+        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
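Every third-party action in this changeset is referenced by a full commit SHA and updated from one SHA to another, with the human-readable tag kept as a trailing comment. Pinning to a 40-character commit hash makes the dependency immutable: a tag like `v2` can be re-pointed at different code at any time, but a commit hash cannot, while the comment preserves readability. A sketch of the convention (the SHA below is a placeholder, not a real release):

```yaml
steps:
  # Mutable reference: the v2 tag can be moved to different code later.
  # - uses: actions/checkout@v2

  # Immutable reference: full commit SHA, with the tag recorded for readers.
  - uses: actions/checkout@0000000000000000000000000000000000000000 # @v2
```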
.github/workflows/macos_python.yml: 19 changes (vendored)

@@ -16,16 +16,21 @@ on:
     - '.github/workflows/macos_python.yml'
   # TODO: run if we touch any of the recipes involved in this
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions
 
 jobs:
   install_gcc:
     name: gcc with clang
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
+      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
        with:
          python-version: 3.9
      - name: spack install
@@ -36,11 +41,12 @@ jobs:
 
   install_jupyter_clang:
     name: jupyter
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     timeout-minutes: 700
     steps:
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
+      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
        with:
          python-version: 3.9
      - name: spack install
@@ -50,10 +56,11 @@ jobs:
 
   install_scipy_clang:
     name: scipy, mpl, pd
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
-      - uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
+      - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
        with:
          python-version: 3.9
      - name: spack install
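Each job in this workflow gains an `if: github.repository == 'spack/spack'` guard. Without it, the scheduled, long-running macOS builds would also fire on every fork that keeps Actions enabled; with it, the job is skipped (reported as skipped, not failed) everywhere except the upstream repository. A minimal sketch (the job name and step are illustrative):

```yaml
jobs:
  nightly-macos:
    # Run only in the upstream repository, never on forks.
    if: github.repository == 'spack/spack'
    runs-on: macos-latest
    steps:
      - run: echo "only runs in spack/spack"
```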
.github/workflows/setup_git.ps1: 1 change (vendored)

@@ -4,6 +4,7 @@ Set-Location spack
 
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
+git config --global core.longpaths true
 
 if ($(git branch --show-current) -ne "develop")
 {
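The single added line turns on git's long-path support for the Windows CI scripts: without `core.longpaths`, git on Windows is bound by the legacy 260-character `MAX_PATH` limit, which deeply nested trees such as Spack's package repository can exceed. A sketch of the same setting applied inside a workflow (the job is illustrative, not from this diff); the config must be in place before the long paths are created:

```yaml
jobs:
  windows-example:
    runs-on: windows-latest
    steps:
      - name: Allow paths longer than 260 characters
        run: git config --global core.longpaths true
      - uses: actions/checkout@v3  # illustrative tag; the workflows above pin a full SHA
```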
49
.github/workflows/unit_tests.yaml
vendored
49
.github/workflows/unit_tests.yaml
vendored
@@ -9,14 +9,19 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- develop
|
- develop
|
||||||
- releases/**
|
- releases/**
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Validate that the code can be run on all the Python versions
|
# Validate that the code can be run on all the Python versions
|
||||||
# supported by Spack
|
# supported by Spack
|
||||||
validate:
|
validate:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
- name: Install Python Packages
|
- name: Install Python Packages
|
||||||
@@ -31,10 +36,10 @@ jobs:
|
|||||||
style:
|
style:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
@@ -57,7 +62,7 @@ jobs:
|
|||||||
packages: ${{ steps.filter.outputs.packages }}
|
packages: ${{ steps.filter.outputs.packages }}
|
||||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
if: ${{ github.event_name == 'push' }}
|
if: ${{ github.event_name == 'push' }}
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -106,10 +111,10 @@ jobs:
|
|||||||
- python-version: 3.9
|
- python-version: 3.9
|
||||||
concretizer: original
|
concretizer: original
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- name: Install System packages
|
- name: Install System packages
|
||||||
@@ -162,7 +167,7 @@ jobs:
|
|||||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-unit-tests
|
share/spack/qa/run-unit-tests
|
||||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||||
with:
|
with:
|
||||||
flags: unittests,linux,${{ matrix.concretizer }}
|
flags: unittests,linux,${{ matrix.concretizer }}
|
||||||
@@ -171,10 +176,10 @@ jobs:
|
|||||||
needs: [ validate, style, changes ]
|
needs: [ validate, style, changes ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
- name: Install System packages
|
- name: Install System packages
|
||||||
@@ -200,7 +205,7 @@ jobs:
|
|||||||
COVERAGE: true
|
COVERAGE: true
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-shell-tests
|
share/spack/qa/run-shell-tests
|
||||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||||
with:
|
with:
|
||||||
flags: shelltests,linux
|
flags: shelltests,linux
|
||||||
@@ -218,7 +223,7 @@ jobs:
|
|||||||
dnf install -y \
|
dnf install -y \
|
||||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||||
make patch tcl unzip which xz
|
make patch tcl unzip which xz
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
- name: Setup repo and non-root user
|
- name: Setup repo and non-root user
|
||||||
run: |
|
run: |
|
||||||
git --version
|
git --version
|
||||||
@@ -237,10 +242,10 @@ jobs:
|
|||||||
needs: [ validate, style, changes ]
|
needs: [ validate, style, changes ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
- name: Install System packages
|
- name: Install System packages
|
||||||
@@ -274,7 +279,7 @@ jobs:
|
|||||||
SPACK_TEST_SOLVER: clingo
|
SPACK_TEST_SOLVER: clingo
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-unit-tests
|
share/spack/qa/run-unit-tests
|
||||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||||
with:
|
with:
|
||||||
flags: unittests,linux,clingo
|
flags: unittests,linux,clingo
|
||||||
@@ -286,10 +291,10 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
python-version: [3.8]
|
python-version: [3.8]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
@@ -320,7 +325,7 @@ jobs:
|
|||||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||||
fi
|
fi
|
||||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||||
with:
|
with:
|
||||||
files: ./coverage.xml
|
files: ./coverage.xml
|
||||||
@@ -331,8 +336,8 @@ jobs:
|
|||||||
needs: [ validate, style, changes ]
|
needs: [ validate, style, changes ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
- uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
@@ -345,12 +350,12 @@ jobs:
          coverage run $(which spack) audit packages
          coverage combine
          coverage xml
-     - name: Package audits (wwithout coverage)
+     - name: Package audits (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
-     - uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71  # @v2.1.0
+     - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378  # @v2.1.0
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,audits
35  .github/workflows/windows_python.yml  vendored
@@ -9,6 +9,11 @@ on:
    branches:
      - develop
      - releases/**

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 defaults:
   run:
     shell:
@@ -17,8 +22,8 @@ jobs:
  validate:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python Packages
@@ -33,10 +38,10 @@ jobs:
  style:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -55,10 +60,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -75,10 +80,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -95,10 +100,10 @@ jobs:
    needs: [ validate, style ]
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -120,10 +125,10 @@ jobs:
          git config --global core.symlinks false
        shell:
          powershell
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
        with:
          fetch-depth: 0
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -139,11 +144,11 @@ jobs:
          echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
        env:
          ProgressPreference: SilentlyContinue
-     - uses: actions/upload-artifact@v3
+     - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
-     - uses: actions/upload-artifact@v3
+     - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi
@@ -154,7 +159,7 @@ jobs:
    run:
      shell: pwsh
    steps:
-     - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
        with:
          python-version: 3.9
      - name: Install Python packages
204  CHANGELOG.md
@@ -1,3 +1,205 @@
# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```

2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)
3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)
5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)
6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)
8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build the packages in the
   environment in parallel on a single node. e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)
10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

    * Allow conditional possible values in variants
    * Add a "sticky" property to variants
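    As a quick illustration of both features, a hypothetical `package.py` might declare them as sketched below. This is a hedged sketch, not code from the release: the package and variant names are invented, and it assumes the `conditional` helper and `sticky` keyword described in the linked docs, plus the usual package-file preamble.

    ```python
    from spack.package import *  # standard package preamble is assumed here


    class Foo(Package):
        """Hypothetical package illustrating the v0.18 variant features."""

        # 'cxxstd=17' only becomes a possible value at version 6.1.0 or later.
        variant(
            "cxxstd",
            default="11",
            values=("11", "14", conditional("17", when="@6.1.0:")),
            multi=False,
            description="C++ standard",
        )

        # Sticky: the concretizer may not flip this; only users can,
        # on the command line or in packages.yaml.
        variant("allow-unsupported-compilers", default=False, sticky=True,
                description="Allow unsupported compilers")
    ```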
## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)
## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)
## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)

* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)
## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all GitHub patches (#29239)
## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
* 219 new Python packages
* 60 new R packages
* 377 people contributed to this release
* 337 committers to packages
* 85 committers to core
# v0.17.2 (2022-04-13)

### Spack bugfixes
@@ -11,7 +213,7 @@
 * Fixed a few bugs affecting the spack ci command (#29518, #29419)
 * Fix handling of Intel compiler environment (#29439)
 * Fix a few edge cases when reindexing the DB (#28764)
 * Remove "Known issues" from documentation (#29664)
 * Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)
@@ -6,34 +6,15 @@ bootstrap:
   # by Spack is installed in a "store" subfolder of this root directory
   root: $user_cache_path/bootstrap
   # Methods that can be used to bootstrap software. Each method may or
-  # may not be able to bootstrap all of the software that Spack needs,
+  # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
   - name: 'github-actions-v0.2'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
   - name: 'github-actions-v0.1'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
-  # This method is just Spack bootstrapping the software it needs from sources.
-  # It has been added here so that users can selectively disable bootstrapping
-  # from sources by "untrusting" it.
-  - name: spack-install
-    type: install
-    description: |
-      Specs built from sources by Spack. May take a long time.
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
+  - name: 'spack-install'
+    metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
@@ -14,4 +14,23 @@ concretizer:
   # concretizing specs. If `true`, we'll try to use as many installs/binaries
   # as possible, rather than building. If `false`, we'll always give you a fresh
   # concretization.
-  reuse: false
+  reuse: true
+  # Options that tune which targets are considered for concretization. The
+  # concretization process is very sensitive to the number of targets, and the time
+  # needed to reach a solution increases noticeably with the number of targets
+  # considered.
+  targets:
+    # Determine whether we want to target specific or generic microarchitectures.
+    # An example of the first kind might be for instance "skylake" or "bulldozer",
+    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
+    granularity: microarchitectures
+    # If "false" allow targets that are incompatible with the current host (for
+    # instance concretize with target "icelake" while running on "haswell").
+    # If "true" only allow targets that are compatible with the host.
+    host_compatible: true
+  # When "true" concretize root specs of environments together, so that each unique
+  # package in an environment corresponds to one concrete spec. This ensures
+  # environments can always be activated. When "false" perform concretization separately
+  # on each root spec, allowing different versions and variants of the same package in
+  # an environment.
+  unify: false
@@ -33,6 +33,9 @@ config:
   template_dirs:
     - $spack/share/spack/templates
 
+  # Directory where licenses should be located
+  license_dir: $spack/etc/spack/licenses
+
   # Temporary locations Spack can try to use for builds.
   #
   # Recommended options are given below.
@@ -35,7 +35,8 @@ packages:
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libllvm: [llvm, llvm-amdgpu]
-    lua-lang: [lua, lua-luajit]
+    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
+    luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
     mkl: [intel-mkl]
     mpe: [mpe2]
@@ -192,32 +192,32 @@ you can use them to customize an installation in :ref:`sec-specs`.
 Reusing installed dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-.. warning::
-
-   The ``--reuse`` option described here will become the default installation
-   method in the next Spack version, and you will be able to get the current
-   behavior by using ``spack install --fresh``.
-
-By default, when you run ``spack install``, Spack tries to build a new
-version of the package you asked for, along with updated versions of
-its dependencies. This gets you the latest versions and configurations,
-but it can result in unwanted rebuilds if you update Spack frequently.
-
-If you want Spack to try hard to reuse existing installations as dependencies,
-you can add the ``--reuse`` option:
+By default, when you run ``spack install``, Spack tries hard to reuse existing installations
+as dependencies, either from a local store or from remote buildcaches if configured.
+This minimizes unwanted rebuilds of common dependencies, in particular if
+you update Spack frequently.
+
+In case you want the latest versions and configurations to be installed instead,
+you can add the ``--fresh`` option:
 
 .. code-block:: console
 
-   $ spack install --reuse mpich
+   $ spack install --fresh mpich
 
-This will not do anything if ``mpich`` is already installed. If ``mpich``
-is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
-the ``mpich`` will be built with the installed versions, if possible.
-You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
+Reusing installations in this mode is "accidental", happening only if
+there is a match between existing installations and what Spack would have installed
+anyhow.
+You can use the ``spack spec -I mpich`` command to see what
 will be reused and what will be built before you install.
 
-You can configure Spack to use the ``--reuse`` behavior by default in
-``concretizer.yaml``.
+You can configure Spack to use the ``--fresh`` behavior by default in
+``concretizer.yaml``:
+
+.. code-block:: yaml
+
+   concretizer:
+     reuse: false
 
 .. _cmd-spack-uninstall:
@@ -50,6 +50,13 @@ build cache files for the "ninja" spec:
 Note that the targeted spec must already be installed. Once you have a build cache,
 you can add it as a mirror, discussed next.
 
+.. warning::
+
+   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
+   will be able to verify and install binary caches both in the new and in the old format.
+   Support for using the old format is expected to end in v0.19, so we advise users to
+   recreate relevant buildcaches using Spack v0.18 or higher.
+
 ---------------------------------------
 Finding or installing build cache files
 ---------------------------------------
160  lib/spack/docs/bootstrapping.rst  Normal file
@@ -0,0 +1,160 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` Section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily, i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing, and it gives detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretizes a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

In case it is needed you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try bootstrapping from pre-built binaries first, and to
fall back to bootstrapping from sources if that fails.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes, though, Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping:

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access, and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -219,33 +219,65 @@ Concretizer options
 but you can also use ``concretizer.yaml`` to customize aspects of the
 algorithm it uses to select the dependencies you install:
 
-.. _code-block: yaml
-
-  concretizer:
-    # Whether to consider installed packages or packages from buildcaches when
-    # concretizing specs. If `true`, we'll try to use as many installs/binaries
-    # as possible, rather than building. If `false`, we'll always give you a fresh
-    # concretization.
-    reuse: false
-
-^^^^^^^^^^^^^^^^
-``reuse``
-^^^^^^^^^^^^^^^^
-
-This controls whether Spack will prefer to use installed packages (``true``), or
-whether it will do a "fresh" installation and prefer the latest settings from
-``package.py`` files and ``packages.yaml`` (``false``).
-
-You can use ``spack install --reuse`` to enable reuse for a single installation,
-and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
-enabled by default.
-
-.. note::
-
-   ``reuse: false`` is the current default, but ``reuse: true`` will be the default
-   in the next Spack release. You will still be able to use ``spack install --fresh``
-   to get the old behavior.
+.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
+   :language: yaml
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Reuse already installed packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
+whether it will do a "fresh" installation and prefer the latest settings from
+``package.py`` files and ``packages.yaml`` (``false``).
+You can use:
+
+.. code-block:: console
+
+   % spack install --reuse <spec>
+
+to enable reuse for a single installation, and you can use:
+
+.. code-block:: console
+
+   % spack install --fresh <spec>
+
+to do a fresh install if ``reuse`` is enabled by default.
+``reuse: true`` is the default.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Selection of the target microarchitectures
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The options under the ``targets`` attribute control which targets are considered during a solve.
+Currently the options in this section are only configurable from the ``concretizer.yaml`` file
+and there are no corresponding command line arguments to enable them for a single solve.
+
+The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
+If set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: microarchitectures
+
+Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
+compatibility. If instead the option is set to:
+
+.. code-block:: yaml
+
+   concretizer:
+     targets:
+       granularity: generic
+
+Spack will consider only generic microarchitectures. For instance, when running on a
+Haswell node, Spack will consider ``haswell`` as the best target in the former case and
+``x86_64_v3`` as the best target in the latter case.
+
+The ``host_compatible`` option is a Boolean option that determines whether or not the
+microarchitectures considered during the solve are constrained to be compatible with the
+host Spack is currently running on. For instance, if this option is set to ``true``, a
+user cannot concretize for ``target=icelake`` while running on a Haswell node.
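If you prefer to manage these preferences from Python, for instance in a site setup
script, the same keys can be written through Spack's configuration API. The sketch
below is hedged: it assumes the ``spack.config.set`` entry point from Spack's internal
API, which is not described in this document.

.. code-block:: python

   import spack.config

   # Roughly equivalent to editing the keys in concretizer.yaml by hand.
   spack.config.set("concretizer:reuse", True, scope="user")
   spack.config.set(
       "concretizer:targets",
       {"granularity": "generic", "host_compatible": True},
       scope="user",
   )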
.. _package-preferences:
@@ -39,6 +39,7 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/autotoolspackage
    build_systems/cmakepackage
+   build_systems/cachedcmakepackage
    build_systems/mesonpackage
    build_systems/qmakepackage
    build_systems/sippackage
@@ -47,6 +48,7 @@ on these ideas for each distinct build system that Spack supports:
    :maxdepth: 1
    :caption: Language-specific
 
+   build_systems/luapackage
    build_systems/octavepackage
    build_systems/perlpackage
    build_systems/pythonpackage
@@ -60,11 +62,12 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/bundlepackage
    build_systems/cudapackage
+   build_systems/custompackage
    build_systems/inteloneapipackage
    build_systems/intelpackage
-   build_systems/rocmpackage
-   build_systems/custompackage
    build_systems/multiplepackage
+   build_systems/rocmpackage
+   build_systems/sourceforgepackage
 
 For reference, the :py:mod:`Build System API docs <spack.build_systems>`
 provide a list of build systems and methods/attributes that can be
123  lib/spack/docs/build_systems/cachedcmakepackage.rst  Normal file
@@ -0,0 +1,123 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.
A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec.
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
       return entries

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
       return entries
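For orientation, the helper calls in the example above boil down to string
formatting. The sketch below shows roughly what a call like
``cmake_cache_option('FOO_MPI', True, 'enable mpi')`` produces; the exact output
format is assumed here, not quoted from Spack's sources.

.. code-block:: python

   def cmake_cache_option(name, boolean_value, comment=""):
       # Sketch: render a boolean CMake cache entry such as
       #   set(FOO_MPI ON CACHE BOOL "enable mpi")
       value = "ON" if boolean_value else "OFF"
       return 'set({0} {1} CACHE BOOL "{2}")'.format(name, value, comment)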
^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::
 
    spack install hdf5 +mpi ^intel-oneapi-mpi
 
-Using an Externally Installed oneAPI
-====================================
+Using Externally Installed oneAPI Tools
+=======================================
 
 Spack can also use oneAPI tools that are manually installed with
 `Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to
 Libraries
 ---------
 
-If you want Spack to use MKL that you have installed without Spack in
+If you want Spack to use oneMKL that you have installed without Spack in
 the default location, then add the following to
 ``~/.spack/packages.yaml``, adjusting the version as appropriate::
@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::
 
    spack load intel-oneapi-mkl
 
 Will update your environment CPATH, LIBRARY_PATH, and other
-environment variables for building an application with MKL.
+environment variables for building an application with oneMKL.
 
 More information
 ================
@@ -15,6 +15,9 @@ IntelPackage
 Intel packages in Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
+This is an earlier version of Intel software development tools and has
+now been replaced by Intel oneAPI Toolkits.
+
 Spack can install and use several software development products offered by Intel.
 Some of these are available under no-cost terms, others require a paid license.
 All share the same basic steps for configuration, installation, and, where
105  lib/spack/docs/build_systems/luapackage.rst  Normal file
@@ -0,0 +1,105 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _luapackage:

------------
LuaPackage
------------

LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. This is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most Lua packages. If a Lua package builds with Make rather than
LuaRocks, prefer ``MakefilePackage``.

^^^^^^
Phases
^^^^^^

The ``LuaPackage`` base class comes with the following phases:

#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
#. ``preprocess`` - adjust sources or rockspec to fix build
#. ``install`` - install the project

By default, these phases run:

.. code-block:: console

   # If the archive is a source rock
   $ luarocks unpack <archive>.src.rock
   $ # preprocess is a noop by default
   $ luarocks make <name>.rockspec

Any of these phases can be overridden in your package as necessary.

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

Packages that use the Lua/LuaRocks build system can be identified by the
presence of a ``*.rockspec`` file in their source tree, or can be fetched as
a source rock archive (``.src.rock``). This file declares things like build
instructions and dependencies; the ``.src.rock`` also contains all code.

It is common for the rockspec file to list the Lua version required in
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
implementation, but it is a good idea to specify the version required with
a ``depends_on`` statement. The block normally will be a table definition like
this:

.. code-block:: lua

   dependencies = {
      "lua >= 5.1",
   }

The LuaPackage class supports source repositories and archives containing
a rockspec and directly downloading source rock files. It *does not* support
downloading dependencies listed inside a rockspec, and thus does not support
directly downloading a rockspec as an archive.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

All base dependencies are added by the build system, but LuaRocks is run to
avoid downloading extra Lua dependencies during build. If the package needs
Lua libraries outside the standard set, they should be added as dependencies.

To specify a Lua version constraint but allow all Lua implementations, prefer
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1-compatible
version. If the package requires LuaJIT rather than Lua,
a ``depends_on("luajit")`` should be used to ensure a LuaJIT distribution is
used instead of the Lua interpreter. Alternately, if only interpreted Lua will
work, ``depends_on("lua")`` will express that.
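Put together, a minimal hypothetical recipe using these dependency declarations
might look like the sketch below; the package name, URLs, and checksum are
invented purely for illustration.

.. code-block:: python

   class LuaExample(LuaPackage):
       """Hypothetical Lua package distributed with a rockspec."""

       homepage = "https://example.org/lua-example"                 # placeholder
       url = "https://example.org/lua-example-1.0.0.src.rock"       # placeholder

       version("1.0.0", sha256="0" * 64)  # placeholder checksum

       # Any 5.1-compatible interpreter or JIT satisfies the rockspec above
       depends_on("lua-lang@5.1:5.1.99")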
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to luarocks make
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you need to pass any arguments to the ``luarocks make`` call, you can
override the ``luarocks_args`` method like so:

.. code-block:: python

   def luarocks_args(self):
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:

.. code-block:: python

   def luarocks_args(self):
       return ["CFLAGS='-Wno-error=implicit-function-declaration'"]

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on the LuaRocks build system, see:
https://luarocks.org/
@@ -48,8 +48,9 @@ important to understand.
 **build backend**
    Libraries used to define how to build a wheel. Examples
    include `setuptools <https://setuptools.pypa.io/>`__,
-   `flit <https://flit.readthedocs.io/>`_, and
-   `poetry <https://python-poetry.org/>`_.
+   `flit <https://flit.readthedocs.io/>`_,
+   `poetry <https://python-poetry.org/>`_, and
+   `hatchling <https://hatch.pypa.io/latest/>`_.
 
 ^^^^^^^^^^^
 Downloading
@@ -326,6 +327,33 @@ for specifying the version requirements. Note that ``~=`` works
 differently in poetry than in setuptools and flit for versions that
 start with a zero.
 
+"""""""""
+hatchling
+"""""""""
+
+If the ``pyproject.toml`` lists ``hatchling.build`` as the
+``build-backend``, it uses the hatchling build system. Look for
+dependencies under the following keys:
+
+* ``requires-python``
+
+  This specifies the version of Python that is required
+
+* ``project.dependencies``
+
+  These packages are required for building and installation. You can
+  add them with ``type=('build', 'run')``.
+
+* ``project.optional-dependencies``
+
+  This section includes keys with lists of optional dependencies
+  needed to enable those features. You should add a variant that
+  optionally adds these dependencies. This variant should be ``False``
+  by default.
+
+See https://hatch.pypa.io/latest/config/dependency/ for more
+information.
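Taken together, those ``pyproject.toml`` keys map onto a recipe roughly as
sketched below. This is a hedged illustration, not a real package: the names
``py-example``, ``py-requests``, and ``py-sphinx`` stand in for whatever the
actual project declares.

.. code-block:: python

   class PyExample(PythonPackage):
       """Hypothetical package whose pyproject.toml uses hatchling.build."""

       # build-backend -> build-time dependency on hatchling
       depends_on("py-hatchling", type="build")

       # requires-python -> constraint on the Python interpreter
       depends_on("python@3.8:", type=("build", "run"))

       # project.dependencies -> regular build+run dependencies
       depends_on("py-requests", type=("build", "run"))

       # project.optional-dependencies -> a variant, off by default
       variant("docs", default=False, description="Enable optional docs deps")
       depends_on("py-sphinx", when="+docs", type=("build", "run"))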
 """"""
 wheels
 """"""
@@ -666,3 +694,4 @@ For more information on build backend tools, see:
 * setuptools: https://setuptools.pypa.io/
 * flit: https://flit.readthedocs.io/
 * poetry: https://python-poetry.org/
+* hatchling: https://hatch.pypa.io/latest/
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your
    # Set up the hip macros needed by the build
    args.extend([
        '-DENABLE_HIP=ON',
-       '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix])
+       '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
    rocm_archs = spec.variants['amdgpu_target'].value
    if 'none' not in rocm_archs:
        args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
55  lib/spack/docs/build_systems/sourceforgepackage.rst  Normal file
@@ -0,0 +1,55 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _sourceforgepackage:

------------------
SourceforgePackage
------------------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.

^^^^^^^
Methods
^^^^^^^

This package provides a method for populating mirror URLs.

**urls**

This method returns a list of possible URLs for package source.
It is decorated with `property` so its results are treated as
a package attribute.

Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the `urls` attribute during
fetching.
|
||||||
|
|
||||||
|
^^^^^
|
||||||
|
Usage
|
||||||
|
^^^^^
|
||||||
|
|
||||||
|
This helper package can be added to your package by adding it as a base
|
||||||
|
class of your package and defining the relative location of an archive
|
||||||
|
file for one version of your software.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
:emphasize-lines: 1,3
|
||||||
|
|
||||||
|
class MyPackage(AutotoolsPackage, SourceforgePackage):
|
||||||
|
...
|
||||||
|
sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"
|
||||||
|
...
|
||||||
|
|
||||||
|
Over 40 packages are using ``SourceforcePackage`` this mix-in as of
|
||||||
|
July 2022 so there are multiple packages to choose from if you want
|
||||||
|
to see a real example.
|
||||||
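For orientation (an illustration, not part of the file above): given the usage
shown, the ``urls`` property yields one candidate URL per known mirror. A
self-contained sketch, with example hostnames standing in for the real mirror
list kept in ``sourceforge.py``:

.. code-block:: python

   class SourceforgeMirrorSketch:
       # Hypothetical stand-ins; the authoritative list lives in
       # lib/spack/spack/build_systems/sourceforge.py.
       base_mirrors = [
           "https://prdownloads.sourceforge.net/",
           "https://sourceforge.net/projects/",
       ]

       sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"

       @property
       def urls(self):
           # One full URL per mirror for the same archive path
           return [m + self.sourceforge_mirror_path for m in self.base_mirrors]

   print(SourceforgeMirrorSketch().urls)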
@@ -23,7 +23,10 @@
 import sys
 from glob import glob

+from docutils.statemachine import StringList
+from sphinx.domains.python import PythonDomain
 from sphinx.ext.apidoc import main as sphinx_apidoc
+from sphinx.parsers import RSTParser

 # -- Spack customizations -----------------------------------------------------
 # If extensions (or modules to document with autodoc) are in another directory,

@@ -82,9 +85,6 @@
 #
 # Disable duplicate cross-reference warnings.
 #
-from sphinx.domains.python import PythonDomain
-
-
 class PatchedPythonDomain(PythonDomain):
     def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         if 'refspecific' in node:

@@ -92,8 +92,20 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         return super(PatchedPythonDomain, self).resolve_xref(
             env, fromdocname, builder, typ, target, node, contnode)

+
+#
+# Disable tabs to space expansion in code blocks
+# since Makefiles require tabs.
+#
+class NoTabExpansionRSTParser(RSTParser):
+    def parse(self, inputstring, document):
+        if isinstance(inputstring, str):
+            lines = inputstring.splitlines()
+            inputstring = StringList(lines, document.current_source)
+        super().parse(inputstring, document)
+
+
 def setup(sphinx):
     sphinx.add_domain(PatchedPythonDomain, override=True)
+    sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)

 # -- General configuration -----------------------------------------------------
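A quick illustration of the behavior the parser override suppresses (plain
Python, standard library only; not part of the diff): docutils normally
expands tabs while splitting source text into lines, which corrupts Makefile
recipes embedded in code blocks.

.. code-block:: python

   makefile_snippet = "all:\n\techo building"

   # What a tab-expanding reader produces -- the literal TAB is lost:
   expanded = [line.expandtabs(8) for line in makefile_snippet.splitlines()]
   print(repr(expanded[1]))  # '        echo building'

   # What NoTabExpansionRSTParser preserves by wrapping raw lines directly:
   raw = makefile_snippet.splitlines()
   print(repr(raw[1]))       # '\techo building'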
@@ -59,7 +59,8 @@ other techniques to minimize the size of the final image:
 &&  echo "  specs:" \
 &&  echo "  - gromacs+mpi" \
 &&  echo "  - mpich" \
-&&  echo "  concretization: together" \
+&&  echo "  concretizer:" \
+&&  echo "    unify: true" \
 &&  echo "  config:" \
 &&  echo "    install_tree: /opt/software" \
 &&  echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -108,9 +109,10 @@ Spack Images on Docker Hub
 --------------------------

 Docker images with Spack preinstalled and ready to be used are
-built on `Docker Hub <https://hub.docker.com/u/spack>`_
-at every push to ``develop`` or to a release branch. The OS that
-are currently supported are summarized in the table below:
+built when a release is tagged, or nightly on ``develop``. The images
+are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
+and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
+The OS that are currently supported are summarized in the table below:

 .. _containers-supported-os:

@@ -120,22 +122,31 @@ are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
-   * - Ubuntu 16.04
-     - ``ubuntu:16.04``
-     - ``spack/ubuntu-xenial``
    * - Ubuntu 18.04
      - ``ubuntu:18.04``
      - ``spack/ubuntu-bionic``
+   * - Ubuntu 20.04
+     - ``ubuntu:20.04``
+     - ``spack/ubuntu-focal``
+   * - Ubuntu 22.04
+     - ``ubuntu:22.04``
+     - ``spack/ubuntu-jammy``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
+   * - Amazon Linux 2
+     - ``amazonlinux:2``
+     - ``spack/amazon-linux``

 All the images are tagged with the corresponding release of Spack:

-.. image:: dockerhub_spack.png
+.. image:: images/ghcr_spack.png

 with the exception of the ``latest`` tag that points to the HEAD
 of the ``develop`` branch. These images are available for anyone
@@ -245,7 +256,8 @@ software is respectively built and installed:
 &&  echo "  specs:" \
 &&  echo "  - gromacs+mpi" \
 &&  echo "  - mpich" \
-&&  echo "  concretization: together" \
+&&  echo "  concretizer:" \
+&&  echo "    unify: true" \
 &&  echo "  config:" \
 &&  echo "    install_tree: /opt/software" \
 &&  echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -366,7 +378,8 @@ produces, for instance, the following ``Dockerfile``:
 &&  echo "      externals:" \
 &&  echo "      - spec: cuda%gcc" \
 &&  echo "        prefix: /usr/local/cuda" \
-&&  echo "  concretization: together" \
+&&  echo "  concretizer:" \
+&&  echo "    unify: true" \
 &&  echo "  config:" \
 &&  echo "    install_tree: /opt/software" \
 &&  echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -107,7 +107,6 @@ with a high level view of Spack's directory structure:
        llnl/                  <- some general-use libraries

        spack/                 <- spack module; contains Python code
-          analyzers/          <- modules to run analysis on installed packages
           build_systems/      <- modules for different build systems
           cmd/                <- each file in here is a spack subcommand
           compilers/          <- compiler description files
@@ -151,7 +150,7 @@ Package-related modules
 ^^^^^^^^^^^^^^^^^^^^^^^

 :mod:`spack.package`
-  Contains the :class:`~spack.package.Package` class, which
+  Contains the :class:`~spack.package_base.Package` class, which
   is the superclass for all packages in Spack. Methods on ``Package``
   implement all phases of the :ref:`package lifecycle
   <package-lifecycle>` and manage the build process.
@@ -242,22 +241,6 @@ Unit tests
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Research and Monitoring Modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-:mod:`spack.monitor`
-  Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
-  the ``spack install`` and ``spack analyze`` commands to send build and
-  package metadata up to a `Spack Monitor
-  <https://github.com/spack/spack-monitor>`_ server.
-
-:mod:`spack.analyzers`
-  A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
-  that provides base functions to run, save, and (optionally) upload analysis
-  results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
-
 ^^^^^^^^^^^^^
 Other Modules
@@ -301,240 +284,6 @@ Most spack commands look something like this:
 The information in Package files is used at all stages in this
 process.

-Conceptually, packages are overloaded. They contain:
-
--------------
-Stage objects
--------------
-
-
-.. _writing-analyzers:
-
------------------
-Writing analyzers
------------------
-
-To write an analyzer, you should add a new python file to the
-analyzers module directory at ``lib/spack/spack/analyzers``.
-Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
-to add an analyzer class ``Myanalyzer`` you would write it to
-``spack/analyzers/myanalyzer.py`` and import and
-use the base as follows:
-
-.. code-block:: python
-
-    from .analyzer_base import AnalyzerBase
-
-    class Myanalyzer(AnalyzerBase):
-
-Note that the class name is your module file name, all lowercase
-except for the first capital letter. You can look at other analyzers in
-that analyzer directory for examples. The guide here will tell you about
-the basic functions needed.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^
-Analyzer Output Directory
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-By default, when you run ``spack analyze run`` an analyzer output directory will
-be created in your spack user directory in your ``$HOME``. The reason we output here
-is because the install directory might not always be writable.
-
-.. code-block:: console
-
-    ~/.spack/
-        analyzers
-
-Result files will be written here, organized in subfolders in the same structure
-as the package, with each analyzer owning its own subfolder. For example:
-
-.. code-block:: console
-
-    $ tree ~/.spack/analyzers/
-    /home/spackuser/.spack/analyzers/
-    └── linux-ubuntu20.04-skylake
-        └── gcc-9.3.0
-            └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
-                ├── environment_variables
-                │   └── spack-analyzer-environment-variables.json
-                ├── install_files
-                │   └── spack-analyzer-install-files.json
-                └── libabigail
-                    └── lib
-                        └── spack-analyzer-libabigail-libz.so.1.2.11.xml
-
-Notice that for the libabigail analyzer, since results are generated per object,
-we honor the object's folder in case there are equivalently named files in
-different folders. The result files are typically written as json so they can be
-easily read and uploaded in a future interaction with a monitor.
-
-^^^^^^^^^^^^^^^^^
-Analyzer Metadata
-^^^^^^^^^^^^^^^^^
-
-Your analyzer is required to have the class attributes ``name``, ``outfile``,
-and ``description``. These are printed to the user when they use the subcommand
-``spack analyze list-analyzers``. Here is an example.
-As we mentioned above, note that this analyzer would live in a module named
-``libabigail.py`` in the analyzers folder so that the class can be discovered.
-
-.. code-block:: python
-
-    class Libabigail(AnalyzerBase):
-
-        name = "libabigail"
-        outfile = "spack-analyzer-libabigail.json"
-        description = "Application Binary Interface (ABI) features for objects"
-
-This means that the name and output file should be unique for your analyzer.
-Note that "all" cannot be the name of an analyzer, as this key is used to indicate
-that the user wants to run all analyzers.
-
-.. _analyzer_run_function:
-
-^^^^^^^^^^^^^^^^^^^^^^^^
-An analyzer run Function
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-The core of an analyzer is its ``run()`` function, which should accept no
-arguments. You can assume your analyzer has the package spec of interest at ``self.spec``
-and it's up to the run function to generate whatever analysis data you need,
-and then return the object with a key as the analyzer name. The result data
-should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
-and one of ``value`` or ``binary_value``. The install file should be a relative
-path, not an absolute path. For example, let's say we extract a metric called
-``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
-We might have data that looks like this:
-
-.. code-block:: python
-
-    result = {"name": "metric", "analyzer_name": "thebest-analyzer", "value": "1", "install_file": "bin/wget"}
-
-We'd then return it as follows - note that the key is the analyzer name at ``self.name``.
-
-.. code-block:: python
-
-    return {self.name: result}
-
-This will save the complete result to the analyzer metadata folder, as described
-previously. If you want support for adding a different kind of metadata (e.g.,
-not associated with an install file) then the monitor server would need to be updated
-to support this first.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^
-An analyzer init Function
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you don't need any extra dependencies or checks, you can skip defining an analyzer
-init function, as the base class will handle it. Typically, it will accept
-a spec, and an optional output directory (if the user does not want the default
-metadata folder for analyzer results). The analyzer init function should call
-its parent init, and then do any extra checks or validation that are required for it
-to work. For example:
-
-.. code-block:: python
-
-    def __init__(self, spec, dirname=None):
-        super(Myanalyzer, self).__init__(spec, dirname)
-
-        # install extra dependencies, do extra preparation and checks here
-
-At the end of the init, you will have available to you:
-
-- **self.spec**: the spec object
-- **self.dirname**: an optional directory name the user has provided at init to save to
-- **self.output_dir**: the analyzer metadata directory, where we save by default
-- **self.meta_dir**: the path to the package metadata directory (.spack) if you need it
-
-And can proceed to write your analyzer.
-
-^^^^^^^^^^^^^^^^^^^^^^^
-Saving Analyzer Results
-^^^^^^^^^^^^^^^^^^^^^^^
-
-The analyzer will have ``save_result`` called, with the result object generated,
-to save it to the filesystem, and, if the user has added the ``--monitor`` flag,
-to upload it to a monitor server. If your result follows an accepted result
-format and you don't need to parse it further, you don't need to add this
-function to your class. However, if your result data is large or otherwise
-needs additional parsing, you can define it. If you define the function, it
-is useful to know about the ``output_dir`` property, which you can join
-with your output file relative path of choice:
-
-.. code-block:: python
-
-    outfile = os.path.join(self.output_dir, "my-output-file.txt")
-
-The directory will be provided by the ``output_dir`` property but it won't exist,
-so you should create it:
-
-.. code-block:: python
-
-    # Create the output directory
-    if not os.path.exists(self._output_dir):
-        os.makedirs(self._output_dir)
-
-If you are generating results that match to specific files in the package
-install directory, you should try to maintain those paths in the case that
-there are equivalently named files in different directories that would
-overwrite one another. As an example of an analyzer with a custom save,
-the Libabigail analyzer saves ``*.xml`` files to the analyzer metadata
-folder in ``run()``, as they are either binaries, or as xml (text) would
-usually be too big to pass in one request. For this reason, the files
-are saved during ``run()`` and the filenames added to the result object,
-and then when the result object is passed back into ``save_result()``,
-we skip saving to the filesystem, and instead read the file and send
-each one (separately) to the monitor:
-
-.. code-block:: python
-
-    def save_result(self, result, monitor=None, overwrite=False):
-        """ABI results are saved to individual files, so each one needs to be
-        read and uploaded. Result here should be the lookup generated in run(),
-        the key is the analyzer name, and each value is the result file.
-        We currently upload the entire xml as text because libabigail can't
-        easily read gzipped xml, but this will be updated when it can.
-        """
-        if not monitor:
-            return
-
-        name = self.spec.package.name
-
-        for obj, filename in result.get(self.name, {}).items():
-
-            # Don't include the prefix
-            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")
-
-            # We've already saved the results to file during run
-            content = spack.monitor.read_file(filename)
-
-            # A result needs an analyzer, value or binary_value, and name
-            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
-            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
-            monitor.send_analyze_metadata(self.spec.package, {"libabigail": [data]})
-
-Notice that this function, if you define it, requires a result object (generated by
-``run()``), a monitor (if you want to send), and a boolean ``overwrite`` used
-to check if a result exists first, and not write to it if the result exists and
-overwrite is False. Also notice that since we already saved these files to the
-analyzer metadata folder, we return early if a monitor isn't defined, because
-this function serves to send results to the monitor. If you haven't saved
-anything to the analyzer metadata folder yet, you might want to do that here.
-You should also use ``tty.info`` to give the user a message of
-"Writing result to $DIRNAME."

 .. _writing-commands:
@@ -699,23 +448,6 @@ with a hook, and this is the purpose of this particular hook. Akin to
 ``on_phase_success`` we require the same variables - the package that failed,
 the name of the phase, and the log file where we might find errors.

-"""""""""""""""""""""""""""""""""
-``on_analyzer_save(pkg, result)``
-"""""""""""""""""""""""""""""""""
-
-After an analyzer has saved some result for a package, this hook is called,
-and it provides the package that we just ran the analysis for, along with
-the loaded result. Typically, a result is structured to have the name
-of the analyzer as key, and the result object that is defined in detail in
-:ref:`analyzer_run_function`.
-
-.. code-block:: python
-
-    def on_analyzer_save(pkg, result):
-        """given a package and a result..."""
-        print('Do something extra with a package analysis result here')
-
 ^^^^^^^^^^^^^^^^^^^^^^
 Adding a New Hook Type
(deleted binary image, 88 KiB; file not shown)
@@ -273,19 +273,9 @@ or
 Concretizing
 ^^^^^^^^^^^^

-Once some user specs have been added to an environment, they can be
-concretized. *By default specs are concretized separately*, one after
-the other. This mode of operation permits to deploy a full
-software stack where multiple configurations of the same package
-need to be installed alongside each other. Central installations done
-at HPC centers by system administrators or user support groups
-are a common case that fits in this behavior.
-Environments *can also be configured to concretize all
-the root specs in a self-consistent way* to ensure that
-each package in the environment comes with a single configuration. This
-mode of operation is usually what is required by software developers that
-want to deploy their development environment.
+Once some user specs have been added to an environment, they can be concretized.
+There are at the moment three different modes of operation to concretize an environment,
+which are explained in detail in :ref:`environments_concretization_config`.

 Regardless of which mode of operation has been chosen, the following
 command will ensure all the root specs are concretized according to the
 constraints that are prescribed in the configuration:
@@ -349,6 +339,24 @@ If the Environment has been concretized, Spack will install the
 concretized specs. Otherwise, ``spack install`` will first concretize
 the Environment and then install the concretized specs.

+.. note::
+
+   Every ``spack install`` process builds one package at a time with multiple build
+   jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
+   (see :ref:`build-jobs`). To speed up environment builds further, independent
+   packages can be installed in parallel by launching more Spack instances. For
+   example, the following will build at most four packages in parallel using
+   three background jobs:
+
+   .. code-block:: console
+
+      [myenv]$ spack install & spack install & spack install & spack install
+
+   Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
+   the number of parallel install processes. See :ref:`env-generate-depfile`
+   for details.
+
 As it installs, ``spack install`` creates symbolic links in the
 ``logs/`` directory in the Environment, allowing for easy inspection
 of build logs related to that environment. The ``spack install``
@@ -475,32 +483,76 @@ Appending to this list in the yaml is identical to using the ``spack
 add`` command from the command line. However, there is more power
 available from the yaml file.

+.. _environments_concretization_config:
+
 ^^^^^^^^^^^^^^^^^^^
 Spec concretization
 ^^^^^^^^^^^^^^^^^^^

-Specs can be concretized separately or together, as already
-explained in :ref:`environments_concretization`. The behavior active
-under any environment is determined by the ``concretization`` property:
+An environment can be concretized in three different modes and the behavior active under any environment
+is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

 .. code-block:: yaml

    spack:
      specs:
-       - ncview
-       - netcdf
-       - nco
-       - py-sphinx
-     concretization: together
+       - hdf5~mpi
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: false

-which can currently take either one of the two allowed values ``together`` or ``separately``
-(the default).
+This mode of operation makes it possible to deploy a full software stack where multiple configurations of the same package
+need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
+is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
+strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
+then it will be used for both ``hdf5`` installations.
+
+If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
+i.e. trying to maximize reuse of dependencies across different specs:
+
+.. code-block:: yaml
+
+   spack:
+     specs:
+       - hdf5~mpi
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: when_possible
+
+Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
+specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
+``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
+Central installations done at HPC centers by system administrators or user support groups are a common case
+that fits either of these two modes.
+
+Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
+ensure that each package in the environment comes with a single configuration:
+
+.. code-block:: yaml
+
+   spack:
+     specs:
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: true
+
+This mode of operation is usually what is required by software developers that want to deploy their development
+environment and have a single view of it in the filesystem.
+
+.. note::
+
+   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
+   replace the ``concretization`` property. For reference,
+   ``concretization: together`` is replaced by ``concretizer:unify:true``,
+   and ``concretization: separately`` is replaced by ``concretizer:unify:false``.

 .. admonition:: Re-concretization of user specs

-   When concretizing specs together the entire set of specs will be
+   When concretizing specs *together* or *together where possible* the entire set of specs will be
    re-concretized after any addition of new user specs, to ensure that
-   the environment remains consistent. When instead the specs are concretized
+   the environment remains consistent / minimal. When instead the specs are concretized
    separately only the new specs will be re-concretized after any addition.

 ^^^^^^^^^^^^^
@@ -747,7 +799,7 @@ directories.
       select: [^mpi]
       exclude: ['%pgi@18.5']
       projections:
-        all: {name}/{version}-{compiler.name}
+        all: '{name}/{version}-{compiler.name}'
       link: all
       link_type: symlink
@@ -910,3 +962,89 @@ environment.

 The ``spack env deactivate`` command will remove the default view of
 the environment from the user's path.

+.. _env-generate-depfile:
+
+------------------------------------------
+Generating Depfiles from Environments
+------------------------------------------
+
+Spack can generate ``Makefile``\s to make it easier to build multiple
+packages in an environment in parallel. Generated ``Makefile``\s expose
+targets that can be included in existing ``Makefile``\s, to allow
+other targets to depend on the environment installation.
+
+A typical workflow is as follows:
+
+.. code:: console
+
+   spack env create -d .
+   spack -e . add perl
+   spack -e . concretize
+   spack -e . env depfile > Makefile
+   make -j64
+
+This generates a ``Makefile`` from a concretized environment in the
+current working directory, and ``make -j64`` installs the environment,
+exploiting parallelism across packages as much as possible. Spack
+respects the Make jobserver and forwards it to the build environment
+of packages, meaning that a single ``-j`` flag is enough to control the
+load, even when packages are built in parallel.
+
+By default the following phony convenience targets are available:
+
+- ``make all``: installs the environment (default target);
+- ``make fetch-all``: only fetch sources of all packages;
+- ``make clean``: cleans files used by make, but does not uninstall packages.
+
+.. tip::
+
+   GNU Make version 4.3 and above have great support for output synchronization
+   through the ``-O`` and ``--output-sync`` flags, which ensure that output is
+   printed orderly per package install. To get synchronized output with colors,
+   use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
+
+The following advanced example shows how generated targets can be used in a
+``Makefile``:
+
+.. code:: Makefile
+
+   SPACK ?= spack
+
+   .PHONY: all clean env
+
+   all: env
+
+   spack.lock: spack.yaml
+   	$(SPACK) -e . concretize -f
+
+   env.mk: spack.lock
+   	$(SPACK) -e . env depfile -o $@ --make-target-prefix spack
+
+   env: spack/env
+   	$(info Environment installed!)
+
+   clean:
+   	rm -rf spack.lock env.mk spack/
+
+   ifeq (,$(filter clean,$(MAKECMDGOALS)))
+   include env.mk
+   endif
+
+When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
+from its rule, which triggers concretization. When done, the generated target
+``spack/env`` is available. In the above example, the ``env`` target uses this
+generated target as a prerequisite, meaning that it can make use of the
+installed packages in its commands.
+
+As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
+the include is conditional.
+
+.. note::
+
+   When including generated ``Makefile``\s, it is important to use
+   the ``--make-target-prefix`` flag and use the non-phony target
+   ``<target-prefix>/env`` as prerequisite, instead of the phony target
+   ``<target-prefix>/all``.
lib/spack/docs/images/ghcr_spack.png (new binary image, 70 KiB; file not shown)
@@ -63,6 +63,7 @@ or refer to the full manual below.

    configuration
    config_yaml
+   bootstrapping
    build_settings
    environments
    containers
@@ -308,7 +308,7 @@ the variable ``FOOBAR`` will be unset.
 spec constraints are instead evaluated top to bottom.

 """"""""""""""""""""""""""""""""""""""""""""
-Blacklist or whitelist specific module files
+Exclude or include specific module files
 """"""""""""""""""""""""""""""""""""""""""""

 You can use anonymous specs also to prevent module files from being written or

@@ -322,8 +322,8 @@ your system. If you write a configuration file like:
   modules:
     default:
       tcl:
-        whitelist: ['gcc', 'llvm']  # Whitelist will have precedence over blacklist
-        blacklist: ['%gcc@4.4.7']   # Assuming gcc@4.4.7 is the system compiler
+        include: ['gcc', 'llvm']  # include will have precedence over exclude
+        exclude: ['%gcc@4.4.7']   # Assuming gcc@4.4.7 is the system compiler

 you will prevent the generation of module files for any package that
 is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
@@ -490,7 +490,7 @@ satisfies a default, Spack will generate the module file in the
 appropriate path, and will generate a default symlink to the module
 file as well.

 .. warning::
    If Spack is configured to generate multiple default packages in the
    same directory, the last modulefile to be generated will be the
    default module.
@@ -589,7 +589,7 @@ Filter out environment modifications
 Modifications to certain environment variables in module files are there by
 default, for instance because they are generated by prefix inspections.
 If you want to prevent modifications to some environment variables, you can
-do so by using the environment blacklist:
+do so by using ``exclude_env_vars``:

 .. code-block:: yaml

@@ -599,7 +599,7 @@ do so by using the environment blacklist:
       all:
         filter:
           # Exclude changes to any of these variables
-          environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+          exclude_env_vars: ['CPATH', 'LIBRARY_PATH']

 The configuration above will generate module files that will not contain
 modifications to either ``CPATH`` or ``LIBRARY_PATH``.
@@ -1070,13 +1070,32 @@ Commits

 Submodules
   You can supply ``submodules=True`` to cause Spack to fetch submodules
-  recursively along with the repository at fetch time. For more information
-  about git submodules see the manpage of git: ``man git-submodule``.
+  recursively along with the repository at fetch time.

   .. code-block:: python

      version('1.0.1', tag='v1.0.1', submodules=True)

+  If a package needs more fine-grained control over submodules, define
+  ``submodules`` to be a callable function that takes the package instance as
+  its only argument. The function should return a list of submodules to be fetched.
+
+  .. code-block:: python
+
+     def submodules(package):
+         submodules = []
+         if "+variant-1" in package.spec:
+             submodules.append("submodule_for_variant_1")
+         if "+variant-2" in package.spec:
+             submodules.append("submodule_for_variant_2")
+         return submodules
+
+
+     class MyPackage(Package):
+         version("0.1.0", submodules=submodules)
+
+  For more information about git submodules see the manpage of git: ``man
+  git-submodule``.

 .. _github-fetch:
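As a quick aside (illustrative only, not part of the diff): the callable runs
at fetch time with the package instance, so its result tracks the spec's
variants. The logic can be sanity-checked without Spack by handing it a
stand-in object; a plain string crudely mimics the ``"+variant-1" in spec``
test, which in real Spack is a satisfies-style check:

.. code-block:: python

   class FakeSpec(str):
       """A string stands in for a Spec; `in` becomes substring search."""

   class FakePackage:
       spec = FakeSpec("mypkg+variant-1")

   print(submodules(FakePackage()))  # ['submodule_for_variant_1']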
@@ -2393,9 +2412,9 @@ Influence how dependents are built or run

 Spack provides a mechanism for dependencies to influence the
 environment of their dependents by overriding the
-:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
+:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
 or the
-:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
+:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
 methods.
 The Qt package, for instance, uses this call:
@@ -2417,7 +2436,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
 variables set appropriately before starting the installation. To make things
 even simpler the ``python setup.py`` command is also inserted into the module
 scope of dependents by overriding a third method called
-:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`
+:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
 :

 .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
@@ -2775,6 +2794,256 @@ Suppose a user invokes ``spack install`` like this:
 Spack will fail with a constraint violation, because the version of
 MPICH requested is too low for the ``mpi`` requirement in ``foo``.

+.. _custom-attributes:
+
+------------------
+Custom attributes
+------------------
+
+Often a package will need to provide attributes for dependents to query
+various details about what it provides. While any number of custom defined
+attributes can be implemented by a package, the four specific attributes
+described below are always available on every package, with default
+implementations and the ability to customize with alternate implementations
+in the case of virtual packages provided:
+
+=========== ============================================ ====================================
+Attribute   Purpose                                      Default
+=========== ============================================ ====================================
+``home``    The installation path for the package       ``spec.prefix``
+``command`` An executable command for the package       | ``spec.name`` found in
+                                                         | ``.home.bin``
+``headers`` A list of headers provided by the package   | All headers searched
+                                                         | recursively in ``.home.include``
+``libs``    A list of libraries provided by the package | ``lib{spec.name}`` searched
+                                                         | recursively in ``.home`` starting
+                                                         | with ``lib``, ``lib64``, then the
+                                                         | rest of ``.home``
+=========== ============================================ ====================================
+
+Each of these can be customized by implementing the relevant attribute
+as a ``@property`` in the package's class:
+
+.. code-block:: python
+   :linenos:
+
+   class Foo(Package):
+       ...
+       @property
+       def libs(self):
+           # The library provided by Foo is libMyFoo.so
+           return find_libraries('libMyFoo', root=self.home, recursive=True)
+
+A package may also provide a custom implementation of each attribute
+for the virtual packages it provides by implementing the
+``virtualpackagename_attributename`` property in the package's class.
+The implementation used is the first one found from:
+
+#. Specialized virtual: ``Package.virtualpackagename_attributename``
+#. Generic package: ``Package.attributename``
+#. Default
+
+The use of customized attributes is demonstrated in the next example.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Example: Customized attributes for virtual packages
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Consider a package ``foo`` that can optionally provide two virtual
+packages ``bar`` and ``baz``. When both are enabled the installation tree
+appears as follows:
+
+.. code-block:: console
+
+   include/foo.h
+   include/bar/bar.h
+   lib64/libFoo.so
+   lib64/libFooBar.so
+   baz/include/baz/baz.h
+   baz/lib/libFooBaz.so
+
+The install tree shows that ``foo`` is providing the header ``include/foo.h``
+and library ``lib64/libFoo.so`` in its install prefix. The virtual
+package ``bar`` is providing ``include/bar/bar.h`` and library
+``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
+package, however, is provided in the ``baz`` subdirectory of ``foo``'s
+prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
+library. Such a package could implement the optional attributes as
+follows:
+
+.. code-block:: python
+   :linenos:
+
+   class Foo(Package):
+       ...
+       variant('bar', default=False, description='Enable the Foo implementation of bar')
+       variant('baz', default=False, description='Enable the Foo implementation of baz')
+       ...
+       provides('bar', when='+bar')
+       provides('baz', when='+baz')
+       ...
+
+       # Just the foo headers
+       @property
+       def headers(self):
+           return find_headers('foo', root=self.home.include, recursive=False)
+
+       # Just the foo libraries
+       @property
+       def libs(self):
+           return find_libraries('libFoo', root=self.home, recursive=True)
+
+       # The header provided by the bar virtual package
+       @property
+       def bar_headers(self):
+           return find_headers('bar/bar.h', root=self.home.include, recursive=False)
+
+       # The library provided by the bar virtual package
+       @property
+       def bar_libs(self):
+           return find_libraries('libFooBar', root=self.home, recursive=True)
+
+       # The baz virtual package home
+       @property
+       def baz_home(self):
+           return self.prefix.baz
+
+       # The header provided by the baz virtual package
+       @property
+       def baz_headers(self):
+           return find_headers('baz/baz', root=self.baz_home.include, recursive=False)
+
+       # The library provided by the baz virtual package
+       @property
+       def baz_libs(self):
+           return find_libraries('libFooBaz', root=self.baz_home, recursive=True)
+
+Now consider another package, ``foo-app``, depending on all three:
+
+.. code-block:: python
+   :linenos:
+
+   class FooApp(CMakePackage):
+       ...
+       depends_on('foo')
+       depends_on('bar')
+       depends_on('baz')
+
+The resulting spec objects for its dependencies show the result of
+the above attribute implementations:
+
+.. code-block:: python
+
+   # The core headers and libraries of the foo package
+
+   >>> spec['foo']
+   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
+   >>> spec['foo'].prefix
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
+
+   # home defaults to the package install prefix without an explicit implementation
+   >>> spec['foo'].home
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
+
+   # foo headers from the foo prefix
+   >>> spec['foo'].headers
+   HeaderList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
+   ])
+
+   # foo include directories from the foo prefix
+   >>> spec['foo'].headers.directories
+   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
+
+   # foo libraries from the foo prefix
+   >>> spec['foo'].libs
+   LibraryList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
+   ])
+
+   # foo library directories from the foo prefix
+   >>> spec['foo'].libs.directories
+   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
+
+.. code-block:: python
+
+   # The virtual bar package in the same prefix as foo
+
+   # bar resolves to the foo package
+   >>> spec['bar']
+   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
+   >>> spec['bar'].prefix
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
+
+   # home defaults to the foo prefix without either a Foo.bar_home
+   # or Foo.home implementation
+   >>> spec['bar'].home
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
+
+   # bar header in the foo prefix
+   >>> spec['bar'].headers
+   HeaderList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
+   ])
+
+   # bar include dirs from the foo prefix
+   >>> spec['bar'].headers.directories
+   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
+
+   # bar library from the foo prefix
+   >>> spec['bar'].libs
+   LibraryList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
+   ])
+
+   # bar library directories from the foo prefix
+   >>> spec['bar'].libs.directories
+   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
+
+.. code-block:: python
+
+   # The virtual baz package in a subdirectory of foo's prefix
+
+   # baz resolves to the foo package
+   >>> spec['baz']
+   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
+   >>> spec['baz'].prefix
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
+
+   # baz_home implementation provides the subdirectory inside the foo prefix
+   >>> spec['baz'].home
+   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'
+
+   # baz headers in the baz subdirectory of the foo prefix
+   >>> spec['baz'].headers
+   HeaderList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
+   ])
+
+   # baz include directories in the baz subdirectory of the foo prefix
+   >>> spec['baz'].headers.directories
+   [
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
+   ]
+
+   # baz libraries in the baz subdirectory of the foo prefix
+   >>> spec['baz'].libs
+   LibraryList([
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
+   ])
+
+   # baz library directories in the baz subdirectory of the foo prefix
+   >>> spec['baz'].libs.directories
+   [
+       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
+   ]
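To round the example out (an illustration, not part of the diff): a dependent
like ``foo-app`` would consume these attributes when assembling build
arguments; the CMake variable names below are hypothetical:

.. code-block:: python

   def cmake_args(self):
       baz = self.spec['baz']
       return [
           # resolves to foo's prefix/baz thanks to Foo.baz_home
           '-DBAZ_ROOT={0}'.format(baz.home),
           # first include directory found by Foo.baz_headers
           '-DBAZ_INCLUDE_DIR={0}'.format(baz.headers.directories[0]),
       ]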
 .. _abstract-and-concrete:

 -------------------------
@@ -3022,7 +3291,7 @@ The classes that are currently provided by Spack are:
 +----------------------------------------------------------+----------------------------------+
 | **Base Class**                                           | **Purpose**                      |
 +==========================================================+==================================+
-| :class:`~spack.package.Package`                          | General base class not           |
+| :class:`~spack.package_base.Package`                     | General base class not           |
 |                                                          | specialized for any build system |
 +----------------------------------------------------------+----------------------------------+
 | :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |
@@ -3153,7 +3422,7 @@ for the install phase is:
 For those not used to Python instance methods, this is the
 package itself. In this case it's an instance of ``Foo``, which
 extends ``Package``. For API docs on Package objects, see
-:py:class:`Package <spack.package.Package>`.
+:py:class:`Package <spack.package_base.Package>`.

 ``spec``
    This is the concrete spec object created by Spack from an
@@ -5476,6 +5745,24 @@ Version Lists

 Spack packages should list supported versions with the newest first.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using ``home`` vs ``prefix``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``home`` and ``prefix`` are both attributes that can be queried on a
+package's dependencies, often when passing configure arguments pointing to the
+location of a dependency. The difference is that while ``prefix`` is the
+location on disk where a concrete package resides, ``home`` is the `logical`
+location where a package resides, which may differ from ``prefix`` in
+the case of virtual packages or other special circumstances. For most use
+cases inside a package, its dependency locations can be accessed via either
+``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
+that should be consumed by dependents via ``.home`` instead of ``.prefix``
+should be noted in their respective documentation.
+
+See :ref:`custom-attributes` for more details and an example implementing
+a custom ``home`` attribute.
+
 ---------------------------
 Packaging workflow commands
 ---------------------------
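The ``home``/``prefix`` distinction added above can be exercised from a build
recipe. A minimal sketch, assuming a hypothetical dependent package with a
``foo`` dependency (the class name and configure flags are illustrative, not
part of this changeset):

.. code-block:: python

   from spack.package import *


   class MyApp(AutotoolsPackage):
       """Toy dependent that points configure at a dependency's location."""

       depends_on('foo')

       def configure_args(self):
           foo = self.spec['foo']
           return [
               # Physical location of the concrete foo installation
               '--with-foo-prefix={0}'.format(foo.prefix),
               # Logical location; identical to prefix unless foo (or a
               # virtual it provides) implements a custom ``home``
               '--with-foo-home={0}'.format(foo.home),
           ]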
@@ -115,7 +115,8 @@ And here's the spack environment built by the pipeline represented as a

    spack:
      view: false
-     concretization: separately
+     concretizer:
+       unify: false

      definitions:
      - pkgs:
@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
 ever want to add more packages, you can either use ``spack add`` or manually
 edit this file.

-We also need to change the ``concretization:`` option. By default, Spack
+We also need to change the ``concretizer:unify`` option. By default, Spack
 concretizes each spec *separately*, allowing multiple versions of the same
 package to coexist. Since we want a single consistent environment, we want to
 concretize all of the specs *together*.
@@ -78,7 +78,8 @@ Here is what your ``spack.yaml`` looks like with this new setting:
      # add package specs to the `specs` list
      specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
      view: true
-     concretization: together
+     concretizer:
+       unify: true

 ^^^^^^^^^^^^^^^^
 Symlink location
@@ -25,4 +25,5 @@ spack:
   - subversion
   # Plotting
   - graphviz
-  concretization: together
+  concretizer:
+    unify: true
@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
 tar, , , Extract/create archives
 gzip, , , Compress/Decompress archives
 unzip, , , Compress/Decompress archives
-bzip, , , Compress/Decompress archives
+bzip2, , , Compress/Decompress archives
 xz, , , Compress/Decompress archives
 zstd, , Optional, Compress/Decompress archives
 file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
 git, , , Manage Software Repositories
 svn, , Optional, Manage Software Repositories
 hg, , Optional, Manage Software Repositories
 Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
lib/spack/env/cc (vendored): 6 changes

@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/sh -f
 # shellcheck disable=SC2034 # evals in this script fool shellcheck
 #
 # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -768,7 +768,9 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
     input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
     output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
     echo "[$mode] $command $input_command" >> "$input_log"
-    echo "[$mode] ${full_command_list}" >> "$output_log"
+    IFS="$lsep"
+    echo "[$mode] "$full_command_list >> "$output_log"
+    unset IFS
 fi

 # Execute the full command, preserving spaces with IFS set
lib/spack/external/__init__.py (vendored): 2 changes

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
+* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)

 argparse
 --------
lib/spack/external/archspec/cpu/detect.py (vendored): 75 changes

@@ -61,7 +61,7 @@ def proc_cpuinfo():
     ``/proc/cpuinfo``
     """
     info = {}
-    with open("/proc/cpuinfo") as file:
+    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
         for line in file:
             key, separator, value = line.partition(":")

@@ -80,26 +80,46 @@ def proc_cpuinfo():


 def _check_output(args, env):
-    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
+    output = subprocess.Popen(  # pylint: disable=consider-using-with
+        args, stdout=subprocess.PIPE, env=env
+    ).communicate()[0]
     return six.text_type(output.decode("utf-8"))


+def _machine():
+    """ "Return the machine architecture we are on"""
+    operating_system = platform.system()
+
+    # If we are not on Darwin, trust what Python tells us
+    if operating_system != "Darwin":
+        return platform.machine()
+
+    # On Darwin it might happen that we are on M1, but using an interpreter
+    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
+    # need to fix that.
+    #
+    # See: https://bugs.python.org/issue42704
+    output = _check_output(
+        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
+    ).strip()
+
+    if "Apple" in output:
+        # Note that a native Python interpreter on Apple M1 would return
+        # "arm64" instead of "aarch64". Here we normalize to the latter.
+        return "aarch64"
+
+    return "x86_64"
+
+
 @info_dict(operating_system="Darwin")
 def sysctl_info_dict():
     """Returns a raw info dictionary parsing the output of sysctl."""
-    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
-    # usually found there
-    child_environment = dict(os.environ.items())
-    search_paths = child_environment.get("PATH", "").split(os.pathsep)
-    for additional_path in ("/sbin", "/usr/sbin"):
-        if additional_path not in search_paths:
-            search_paths.append(additional_path)
-    child_environment["PATH"] = os.pathsep.join(search_paths)
+    child_environment = _ensure_bin_usrbin_in_path()

     def sysctl(*args):
         return _check_output(["sysctl"] + list(args), env=child_environment).strip()

-    if platform.machine() == "x86_64":
+    if _machine() == "x86_64":
         flags = (
             sysctl("-n", "machdep.cpu.features").lower()
             + " "
@@ -125,6 +145,18 @@ def sysctl(*args):
     return info


+def _ensure_bin_usrbin_in_path():
+    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
+    # usually found there
+    child_environment = dict(os.environ.items())
+    search_paths = child_environment.get("PATH", "").split(os.pathsep)
+    for additional_path in ("/sbin", "/usr/sbin"):
+        if additional_path not in search_paths:
+            search_paths.append(additional_path)
+    child_environment["PATH"] = os.pathsep.join(search_paths)
+    return child_environment
+
+
 def adjust_raw_flags(info):
     """Adjust the flags detected on the system to homogenize
     slightly different representations.
@@ -184,12 +216,7 @@ def compatible_microarchitectures(info):
     Args:
         info (dict): dictionary containing information on the host cpu
     """
-    architecture_family = platform.machine()
-    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
-    # so we should normalize the name here
-    if architecture_family == "arm64":
-        architecture_family = "aarch64"
-
+    architecture_family = _machine()
     # If a tester is not registered, be conservative and assume no known
     # target is compatible with the host
     tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
@@ -244,12 +271,7 @@ def compatibility_check(architecture_family):
         architecture_family = (architecture_family,)

     def decorator(func):
-        # pylint: disable=fixme
-        # TODO: on removal of Python 2.6 support this can be re-written as
-        # TODO: an update + a dict comprehension
-        for arch_family in architecture_family:
-            COMPATIBILITY_CHECKS[arch_family] = func
-
+        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
         return func

     return decorator
@@ -288,7 +310,7 @@ def compatibility_check_for_x86_64(info, target):
     arch_root = TARGETS[basename]
     return (
         (target == arch_root or arch_root in target.ancestors)
-        and (target.vendor == vendor or target.vendor == "generic")
+        and target.vendor in (vendor, "generic")
         and target.features.issubset(features)
     )
@@ -303,8 +325,9 @@ def compatibility_check_for_aarch64(info, target):
     arch_root = TARGETS[basename]
     return (
         (target == arch_root or arch_root in target.ancestors)
-        and (target.vendor == vendor or target.vendor == "generic")
-        and target.features.issubset(features)
+        and target.vendor in (vendor, "generic")
+        # On macOS it seems impossible to get all the CPU features with syctl info
+        and (target.features.issubset(features) or platform.system() == "Darwin")
     )
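The Rosetta 2 workaround encoded by the new ``_machine()`` helper above can be
checked in isolation. A standalone approximation (not the vendored code; the
``sysctl`` key comes from the hunk above):

.. code-block:: python

   import platform
   import subprocess


   def detected_machine():
       # Trust platform.machine() everywhere except Darwin, where an x86_64
       # interpreter running under Rosetta 2 misreports an M1 host.
       if platform.system() != "Darwin":
           return platform.machine()
       brand = subprocess.check_output(
           ["sysctl", "-n", "machdep.cpu.brand_string"]
       ).decode("utf-8")
       # Normalize Apple Silicon to "aarch64" (native builds report "arm64")
       return "aarch64" if "Apple" in brand else "x86_64"


   print(detected_machine())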
lib/spack/external/archspec/cpu/schema.py (vendored): 4 changes

@@ -11,7 +11,7 @@
 try:
     from collections.abc import MutableMapping  # novm
 except ImportError:
-    from collections import MutableMapping
+    from collections import MutableMapping  # pylint: disable=deprecated-class


 class LazyDictionary(MutableMapping):
@@ -56,7 +56,7 @@ def _load_json_file(json_file):

     def _factory():
         filename = os.path.join(json_dir, json_file)
-        with open(filename, "r") as file:
+        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
             return json.load(file)

     return _factory
lib/spack/external/archspec/json/cpu/microarchitectures.json (vendored)

@@ -85,7 +85,21 @@
         "intel": [
           {
             "versions": ":",
-            "name": "pentium4",
+            "name": "x86-64",
+            "flags": "-march={name} -mtune=generic"
+          }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "x86-64",
+            "flags": "-march={name} -mtune=generic"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "x86-64",
             "flags": "-march={name} -mtune=generic"
           }
         ]
@@ -129,6 +143,20 @@
             "name": "x86-64",
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v2",
+            "flags": "-march={name} -mtune=generic"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v2",
+            "flags": "-march={name} -mtune=generic"
+          }
         ]
       }
     },
@@ -186,6 +214,20 @@
             "name": "x86-64",
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v3",
+            "flags": "-march={name} -mtune=generic"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v3",
+            "flags": "-march={name} -mtune=generic"
+          }
         ]
       }
     },
@@ -248,6 +290,20 @@
             "name": "x86-64",
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v4",
+            "flags": "-march={name} -mtune=generic"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": "2021.2.0:",
+            "name": "x86-64-v4",
+            "flags": "-march={name} -mtune=generic"
+          }
         ]
       }
     },
@@ -288,8 +344,19 @@
         "intel": [
           {
             "versions": "16.0:",
-            "name": "pentium4",
-            "flags": "-march={name} -mtune=generic"
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
           }
         ]
       }
@@ -333,6 +400,18 @@
             "versions": "16.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -384,6 +463,20 @@
             "name": "corei7",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "corei7",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "corei7",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -432,6 +525,20 @@
             "name": "corei7",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "corei7",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "corei7",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -490,6 +597,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -550,6 +669,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -615,6 +746,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -672,6 +815,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -732,6 +887,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -798,6 +965,20 @@
             "name": "knl",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "knl",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "knl",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -868,6 +1049,20 @@
             "name": "skylake-avx512",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "skylake-avx512",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "skylake-avx512",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -937,6 +1132,18 @@
             "versions": "18.0:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1004,6 +1211,18 @@
             "versions": "19.0.1:",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1098,6 +1317,20 @@
             "name": "icelake-client",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "name": "icelake-client",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "name": "icelake-client",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1142,6 +1375,20 @@
             "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
             "flags": "-msse2"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse2"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse2"
+          }
         ]
       }
     },
@@ -1192,6 +1439,20 @@
             "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
             "flags": "-msse3"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse3"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse3"
+          }
         ]
       }
     },
@@ -1246,6 +1507,20 @@
             "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
             "flags": "-msse3"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse3"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse3"
+          }
         ]
       }
     },
@@ -1301,6 +1576,20 @@
             "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
             "flags": "-msse4.2"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse4.2"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "flags": "-msse4.2"
+          }
         ]
       }
     },
@@ -1360,6 +1649,22 @@
             "name": "core-avx2",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1422,6 +1727,22 @@
             "name": "core-avx2",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1485,6 +1806,22 @@
             "name": "core-avx2",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1543,6 +1880,30 @@
             "name": "znver3",
             "flags": "-march={name} -mtune={name}"
           }
+        ],
+        "intel": [
+          {
+            "versions": "16.0:",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "oneapi": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": ":",
+            "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name}"
+          }
         ]
       }
     },
@@ -1788,7 +2149,6 @@
             "fp",
             "asimd",
             "evtstrm",
-            "aes",
             "pmull",
             "sha1",
             "sha2",
@@ -1821,18 +2181,26 @@
             "flags": "-march=armv8.2-a+crc+crypto+fp16"
           },
           {
-            "versions": "8:",
-            "flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
+            "versions": "8:10.2",
+            "flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
+          },
+          {
+            "versions": "10.3:",
+            "flags": "-mcpu=a64fx -msve-vector-bits=512"
           }
         ],
         "clang": [
           {
             "versions": "3.9:4.9",
-            "flags": "-march=armv8.2-a+crc+crypto+fp16"
+            "flags": "-march=armv8.2-a+crc+sha2+fp16"
           },
           {
-            "versions": "5:",
-            "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
+            "versions": "5:10",
+            "flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
+          },
+          {
+            "versions": "11:",
+            "flags": "-mcpu=a64fx"
           }
         ],
         "arm": [
@@ -1954,7 +2322,40 @@
     "m1": {
       "from": ["aarch64"],
       "vendor": "Apple",
-      "features": [],
+      "features": [
+        "fp",
+        "asimd",
+        "evtstrm",
+        "aes",
+        "pmull",
+        "sha1",
+        "sha2",
+        "crc32",
+        "atomics",
+        "fphp",
+        "asimdhp",
+        "cpuid",
+        "asimdrdm",
+        "jscvt",
+        "fcma",
+        "lrcpc",
+        "dcpop",
+        "sha3",
+        "asimddp",
+        "sha512",
+        "asimdfhm",
+        "dit",
+        "uscat",
+        "ilrcpc",
+        "flagm",
+        "ssbs",
+        "sb",
+        "paca",
+        "pacg",
+        "dcpodp",
+        "flagm2",
+        "frint"
+      ],
       "compilers": {
         "gcc": [
           {
@@ -1964,14 +2365,22 @@
         ],
         "clang" : [
           {
-            "versions": "9.0:",
+            "versions": "9.0:12.0",
             "flags" : "-march=armv8.4-a"
+          },
+          {
+            "versions": "13.0:",
+            "flags" : "-mcpu=apple-m1"
           }
         ],
         "apple-clang": [
           {
-            "versions": "11.0:",
+            "versions": "11.0:12.5",
             "flags" : "-march=armv8.4-a"
+          },
+          {
+            "versions": "13.0:",
+            "flags" : "-mcpu=apple-m1"
           }
         ]
       }
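One way to see the effect of the new ``oneapi``/``dpcpp`` entries above is to
query optimization flags through archspec's public API. A sketch, assuming the
vendored archspec is importable; the target name and compiler version here are
illustrative, and unsupported pairs raise ``UnsupportedMicroarchitecture``:

.. code-block:: python

   import archspec.cpu

   target = archspec.cpu.TARGETS["icelake"]
   # Compiler support comes straight from microarchitectures.json
   print(target.optimization_flags("oneapi", "2021.3.0"))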
lib/spack/llnl/util/filesystem.py

@@ -64,6 +64,7 @@
     'is_exe',
     'join_path',
     'last_modification_time_recursive',
+    'library_extensions',
     'mkdirp',
     'partition_path',
     'prefixes',
@@ -109,12 +110,15 @@ def path_contains_subdirectory(path, root):
     return norm_path.startswith(norm_root)


+#: This generates the library filenames that may appear on any OS.
+library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
+
+
 def possible_library_filenames(library_names):
     """Given a collection of library names like 'libfoo', generate the set of
-    library filenames that may be found on the system (e.g. libfoo.so). This
-    generates the library filenames that may appear on any OS.
+    library filenames that may be found on the system (e.g. libfoo.so).
     """
-    lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
+    lib_extensions = library_extensions
     return set(
         '.'.join((lib, extension)) for lib, extension in
         itertools.product(library_names, lib_extensions))
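With ``library_extensions`` now exported, the behavior is easy to check. A
quick sketch (run from a Spack checkout so ``llnl.util`` is importable):

.. code-block:: python

   from llnl.util.filesystem import library_extensions, possible_library_filenames

   print(library_extensions)
   # ['a', 'la', 'so', 'tbd', 'dylib']
   print(sorted(possible_library_filenames(['libfoo'])))
   # ['libfoo.a', 'libfoo.dylib', 'libfoo.la', 'libfoo.so', 'libfoo.tbd']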
@@ -304,6 +308,68 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
         filter_file(double_quoted, '"%s"' % repl, f)


+@contextmanager
+def exploding_archive_catch(stage):
+    # Check for an exploding tarball, i.e. one that doesn't expand to
+    # a single directory. If the tarball *didn't* explode, move its
+    # contents to the staging source directory & remove the container
+    # directory. If the tarball did explode, just rename the tarball
+    # directory to the staging source directory.
+    #
+    # NOTE: The tar program on Mac OS X will encode HFS metadata in
+    # hidden files, which can end up *alongside* a single top-level
+    # directory. We initially ignore presence of hidden files to
+    # accommodate these "semi-exploding" tarballs but ensure the files
+    # are copied to the source directory.
+
+    # Expand all tarballs in their own directory to contain
+    # exploding tarballs.
+    tarball_container = os.path.join(stage.path,
+                                     "spack-expanded-archive")
+    mkdirp(tarball_container)
+    orig_dir = os.getcwd()
+    os.chdir(tarball_container)
+    try:
+        yield
+        # catch an exploding archive on successful extraction
+        os.chdir(orig_dir)
+        exploding_archive_handler(tarball_container, stage)
+    except Exception as e:
+        # return current directory context to previous on failure
+        os.chdir(orig_dir)
+        raise e
+
+
+@system_path_filter
+def exploding_archive_handler(tarball_container, stage):
+    """
+    Args:
+        tarball_container: where the archive was expanded to
+        stage: Stage object referencing filesystem location
+            where archive is being expanded
+    """
+    files = os.listdir(tarball_container)
+    non_hidden = [f for f in files if not f.startswith('.')]
+    if len(non_hidden) == 1:
+        src = os.path.join(tarball_container, non_hidden[0])
+        if os.path.isdir(src):
+            stage.srcdir = non_hidden[0]
+            shutil.move(src, stage.source_path)
+            if len(files) > 1:
+                files.remove(non_hidden[0])
+                for f in files:
+                    src = os.path.join(tarball_container, f)
+                    dest = os.path.join(stage.path, f)
+                    shutil.move(src, dest)
+            os.rmdir(tarball_container)
+        else:
+            # This is a non-directory entry (e.g., a patch file) so simply
+            # rename the tarball container to be the source path.
+            shutil.move(tarball_container, stage.source_path)
+    else:
+        shutil.move(tarball_container, stage.source_path)
+
+
 @system_path_filter(arg_slice=slice(1))
 def get_owner_uid(path, err_msg=None):
     if not os.path.exists(path):
@@ -363,7 +429,7 @@ def group_ids(uid=None):


 @system_path_filter(arg_slice=slice(1))
-def chgrp(path, group):
+def chgrp(path, group, follow_symlinks=True):
     """Implement the bash chgrp function on a single path"""
     if is_windows:
         raise OSError("Function 'chgrp' is not supported on Windows")
@@ -372,7 +438,10 @@ def chgrp(path, group):
         gid = grp.getgrnam(group).gr_gid
     else:
         gid = group
-    os.chown(path, -1, gid)
+    if follow_symlinks:
+        os.chown(path, -1, gid)
+    else:
+        os.lchown(path, -1, gid)


 @system_path_filter(arg_slice=slice(1))
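The new ``follow_symlinks`` keyword makes it possible to change the group of a
link itself rather than its target. A usage sketch (the path and group are
illustrative):

.. code-block:: python

   from llnl.util.filesystem import chgrp

   # Change the group of the file the link points to (default behavior)
   chgrp('/tmp/some-symlink', 'staff')

   # Change the group of the symlink itself, via os.lchown under the hood
   chgrp('/tmp/some-symlink', 'staff', follow_symlinks=False)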
@@ -764,39 +833,36 @@ def __init__(self, inner_exception, outer_exception):

 @contextmanager
 @system_path_filter
-def replace_directory_transaction(directory_name, tmp_root=None):
-    """Moves a directory to a temporary space. If the operations executed
-    within the context manager don't raise an exception, the directory is
-    deleted. If there is an exception, the move is undone.
+def replace_directory_transaction(directory_name):
+    """Temporarily renames a directory in the same parent dir. If the operations
+    executed within the context manager don't raise an exception, the renamed directory
+    is deleted. If there is an exception, the move is undone.

     Args:
         directory_name (path): absolute path of the directory name
-        tmp_root (path): absolute path of the parent directory where to create
-            the temporary

     Returns:
         temporary directory where ``directory_name`` has been moved
     """
     # Check the input is indeed a directory with absolute path.
     # Raise before anything is done to avoid moving the wrong directory
-    assert os.path.isdir(directory_name), \
-        'Invalid directory: ' + directory_name
-    assert os.path.isabs(directory_name), \
-        '"directory_name" must contain an absolute path: ' + directory_name
-
-    directory_basename = os.path.basename(directory_name)
+    directory_name = os.path.abspath(directory_name)
+    assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name

-    if tmp_root is not None:
-        assert os.path.isabs(tmp_root)
+    # Note: directory_name is normalized here, meaning the trailing slash is dropped,
+    # so dirname is the directory's parent not the directory itself.
+    tmpdir = tempfile.mkdtemp(
+        dir=os.path.dirname(directory_name),
+        prefix='.backup')

-    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
-    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))
-
-    shutil.move(src=directory_name, dst=tmp_dir)
-    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))
+    # We have to jump through hoops to support Windows, since
+    # os.rename(directory_name, tmpdir) errors there.
+    backup_dir = os.path.join(tmpdir, 'backup')
+    os.rename(directory_name, backup_dir)
+    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))

     try:
-        yield tmp_dir
+        yield backup_dir
     except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
         # Try to recover the original directory, if this fails, raise a
         # composite exception.
@@ -804,10 +870,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
             # Delete what was there, before copying back the original content
             if os.path.exists(directory_name):
                 shutil.rmtree(directory_name)
-            shutil.move(
-                src=os.path.join(tmp_dir, directory_basename),
-                dst=os.path.dirname(directory_name)
-            )
+            os.rename(backup_dir, directory_name)
         except Exception as outer_exception:
             raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)

@@ -815,8 +878,8 @@ def replace_directory_transaction(directory_name, tmp_root=None):
         raise
     else:
         # Otherwise delete the temporary directory
-        shutil.rmtree(tmp_dir, ignore_errors=True)
-        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
+        shutil.rmtree(tmpdir, ignore_errors=True)
+        tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))


 @system_path_filter
@@ -1097,7 +1160,32 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
     for f in dir_entries:
         if sys.version_info >= (3, 5, 0):
             rel_child = os.path.join(rel_path, f.name)
-            islink, isdir = f.is_symlink(), f.is_dir()
+            islink = f.is_symlink()
+            # On Windows, symlinks to directories are distinct from
+            # symlinks to files, and it is possible to create a
+            # broken symlink to a directory (e.g. using os.symlink
+            # without `target_is_directory=True`), invoking `isdir`
+            # on a symlink on Windows that is broken in this manner
+            # will result in an error. In this case we can work around
+            # the issue by reading the target and resolving the
+            # directory ourselves
+            try:
+                isdir = f.is_dir()
+            except OSError as e:
+                if is_windows and hasattr(e, 'winerror')\
+                        and e.winerror == 5 and islink:
+                    # if path is a symlink, determine destination and
+                    # evaluate file vs directory
+                    link_target = resolve_link_target_relative_to_the_link(f)
+                    # link_target might be relative but
+                    # resolve_link_target_relative_to_the_link
+                    # will ensure that if so, that it is relative
+                    # to the CWD and therefore
+                    # makes sense
+                    isdir = os.path.isdir(link_target)
+                else:
+                    raise e
+
         else:
             rel_child = os.path.join(rel_path, f)
             lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
@@ -1105,7 +1193,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
             continue

         if not isdir:
-            # Handle files
+            # handle files
             visitor.visit_file(root, rel_child, depth)
         elif not islink and visitor.before_visit_dir(root, rel_child, depth):
             # Handle ordinary directories
@@ -1180,6 +1268,35 @@ def remove_if_dead_link(path):
         os.unlink(path)


+def readonly_file_handler(ignore_errors=False):
+    # TODO: generate stages etc. with write permissions wherever
+    # so this callback is no-longer required
+    """
+    Generate callback for shutil.rmtree to handle permissions errors on
+    Windows. Some files may unexpectedly lack write permissions even
+    though they were generated by Spack on behalf of the user (e.g. the
+    stage), so this callback will detect such cases and modify the
+    permissions if that is the issue. For other errors, the fallback
+    is either to raise (if ignore_errors is False) or ignore (if
+    ignore_errors is True). This is only intended for Windows systems
+    and will raise a separate error if it is ever invoked (by accident)
+    on a non-Windows system.
+    """
+    def error_remove_readonly(func, path, exc):
+        if not is_windows:
+            raise RuntimeError("This method should only be invoked on Windows")
+        excvalue = exc[1]
+        if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
+                excvalue.errno == errno.EACCES:
+            # change the file to be readable,writable,executable: 0777
+            os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+            # retry
+            func(path)
+        elif not ignore_errors:
+            raise
+    return error_remove_readonly
+
+
 @system_path_filter
 def remove_linked_tree(path):
     """Removes a directory and its contents.
@@ -1187,23 +1304,18 @@ def remove_linked_tree(path):
     If the directory is a symlink, follows the link and removes the real
     directory before removing the link.

+    This method will force-delete files on Windows
+
     Parameters:
         path (str): Directory to be removed
     """
-    # On windows, cleaning a Git stage can be an issue
-    # as git leaves readonly files that Python handles
-    # poorly on Windows. Remove readonly status and try again
-    def onerror(func, path, exe_info):
-        os.chmod(path, stat.S_IWUSR)
-        try:
-            func(path)
-        except Exception as e:
-            tty.warn(e)
-            pass
-
     kwargs = {'ignore_errors': True}

+    # Windows readonly files cannot be removed by Python
+    # directly.
     if is_windows:
-        kwargs = {'onerror': onerror}
+        kwargs['ignore_errors'] = False
+        kwargs['onerror'] = readonly_file_handler(ignore_errors=True)

     if os.path.exists(path):
         if os.path.islink(path):
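The reworked ``replace_directory_transaction`` above now keeps its backup next
to the directory's parent and restores it on failure. A minimal usage sketch
(the directory path is illustrative, and the body is expected to recreate the
directory when it succeeds):

.. code-block:: python

   import os

   from llnl.util.filesystem import replace_directory_transaction

   install_dir = '/tmp/demo-install'
   os.makedirs(install_dir, exist_ok=True)

   # The context manager yields the backup location; if the body raises,
   # the original directory is restored, otherwise the backup is deleted.
   with replace_directory_transaction(install_dir) as backup_dir:
       print('backup kept at', backup_dir)
       os.mkdir(install_dir)  # rebuild the directory in place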
lib/spack/llnl/util/lang.py

@@ -11,7 +11,9 @@
 import os
 import re
 import sys
+import traceback
 from datetime import datetime, timedelta
+from typing import List, Tuple

 import six
 from six import string_types
@@ -1009,3 +1011,76 @@ def __repr__(self):
 
     def __str__(self):
         return str(self.data)
+
+
+class GroupedExceptionHandler(object):
+    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
+
+    def __init__(self):
+        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]
+
+    def __bool__(self):
+        """Whether any exceptions were handled."""
+        return bool(self.exceptions)
+
+    def forward(self, context):
+        # type: (str) -> GroupedExceptionForwarder
+        """Return a contextmanager which extracts tracebacks and prefixes a message."""
+        return GroupedExceptionForwarder(context, self)
+
+    def _receive_forwarded(self, context, exc, tb):
+        # type: (str, Exception, List[str]) -> None
+        self.exceptions.append((context, exc, tb))
+
+    def grouped_message(self, with_tracebacks=True):
+        # type: (bool) -> str
+        """Print out an error message coalescing all the forwarded errors."""
+        each_exception_message = [
+            '{0} raised {1}: {2}{3}'.format(
+                context,
+                exc.__class__.__name__,
+                exc,
+                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
+            )
+            for context, exc, tb in self.exceptions
+        ]
+        return 'due to the following failures:\n{0}'.format(
+            '\n'.join(each_exception_message)
+        )
+
+
+class GroupedExceptionForwarder(object):
+    """A contextmanager to capture exceptions and forward them to a
+    GroupedExceptionHandler."""
+
+    def __init__(self, context, handler):
+        # type: (str, GroupedExceptionHandler) -> None
+        self._context = context
+        self._handler = handler
+
+    def __enter__(self):
+        return None
+
+    def __exit__(self, exc_type, exc_value, tb):
+        if exc_value is not None:
+            self._handler._receive_forwarded(
+                self._context,
+                exc_value,
+                traceback.format_tb(tb),
+            )
+
+        # Suppress any exception from being re-raised:
+        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
+        return True
+
+
+class classproperty(object):
+    """Non-data descriptor to evaluate a class-level property. The function that performs
+    the evaluation is injected at creation time and takes an instance (could be None) and
+    an owner (i.e. the class that originated the instance).
+    """
+
+    def __init__(self, callback):
+        self.callback = callback
+
+    def __get__(self, instance, owner):
+        return self.callback(owner)
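A minimal usage sketch of the new GroupedExceptionHandler/GroupedExceptionForwarder pair defined above. The import path matches the bootstrap.py hunk later in this diff; the step names and the failing callables are hypothetical, and this assumes Spack's `llnl` package is importable:

    from llnl.util.lang import GroupedExceptionHandler

    h = GroupedExceptionHandler()
    for name, action in [('step-a', lambda: 1 / 0), ('step-b', lambda: None)]:
        # Exceptions raised inside the context are captured, not re-raised
        with h.forward(name):
            action()

    if h:  # truthy once at least one exception was forwarded
        print(h.grouped_message(with_tracebacks=False))
        # -> due to the following failures:
        #    step-a raised ZeroDivisionError: division by zero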
@@ -809,19 +809,23 @@ def __enter__(self):
         def background_reader(reader, echo_writer, _kill):
             # for each line printed to logfile, read it
             # if echo: write line to user
-            while True:
-                is_killed = _kill.wait(.1)
-                self.stderr.flush()
-                self.stdout.flush()
-                line = reader.readline()
-                while line:
-                    if self.echo:
-                        self.echo_writer.write('{0}'.format(line.decode()))
-                        self.echo_writer.flush()
-                    line = reader.readline()
-
-                if is_killed:
-                    break
+            try:
+                while True:
+                    is_killed = _kill.wait(.1)
+                    # Flush buffered build output to file
+                    # stdout/err fds refer to log file
+                    self.stderr.flush()
+                    self.stdout.flush()
+
+                    line = reader.readline()
+                    if self.echo and line:
+                        echo_writer.write('{0}'.format(line.decode()))
+                        echo_writer.flush()
+
+                    if is_killed:
+                        break
+            finally:
+                reader.close()
 
         self._active = True
         with replace_environment(self.env):
@@ -837,7 +841,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
             self._ioflag = False
         else:
             self.writer.close()
-            self.reader.close()
             self.echo_writer.flush()
             self.stdout.flush()
             self.stderr.flush()
@@ -853,10 +856,7 @@ def force_echo(self):
         if not self._active:
             raise RuntimeError(
                 "Can't call force_echo() outside log_output region!")
-        try:
-            yield self
-        finally:
-            pass
+        yield
 
 
 def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: (major, minor, micro, dev release) tuple
-spack_version_info = (0, 18, 0, 'dev0')
+spack_version_info = (0, 19, 0, 'dev0')
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
 spack_version = '.'.join(str(s) for s in spack_version_info)
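The PEP440 string is derived directly from the tuple, so after this hunk:

    spack_version_info = (0, 19, 0, 'dev0')
    spack_version = '.'.join(str(s) for s in spack_version_info)
    print(spack_version)  # -> 0.19.0.dev0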
@@ -1,42 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""This package contains code for creating analyzers to extract Application
-Binary Interface (ABI) information, along with simple analyses that just load
-existing metadata.
-"""
-
-from __future__ import absolute_import
-
-import llnl.util.tty as tty
-
-import spack.paths
-import spack.util.classes
-
-mod_path = spack.paths.analyzers_path
-analyzers = spack.util.classes.list_classes("spack.analyzers", mod_path)
-
-# The base analyzer does not have a name, and cannot do dict comprehension
-analyzer_types = {}
-for a in analyzers:
-    if not hasattr(a, "name"):
-        continue
-    analyzer_types[a.name] = a
-
-
-def list_all():
-    """A helper function to list all analyzers and their descriptions
-    """
-    for name, analyzer in analyzer_types.items():
-        print("%-25s: %-35s" % (name, analyzer.description))
-
-
-def get_analyzer(name):
-    """Courtesy function to retrieve an analyzer, and exit on error if it
-    does not exist.
-    """
-    if name in analyzer_types:
-        return analyzer_types[name]
-    tty.die("Analyzer %s does not exist" % name)
@@ -1,116 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""An analyzer base provides basic functions to run the analysis, save results,
-and (optionally) interact with a Spack Monitor
-"""
-
-import os
-
-import llnl.util.tty as tty
-
-import spack.config
-import spack.hooks
-import spack.monitor
-import spack.util.path
-
-
-def get_analyzer_dir(spec, analyzer_dir=None):
-    """
-    Given a spec, return the directory to save analyzer results.
-
-    We create the directory if it does not exist. We also check that the
-    spec has an associated package. An analyzer cannot be run if the spec isn't
-    associated with a package. If the user provides a custom analyzer_dir,
-    we use it over checking the config and the default at ~/.spack/analyzers
-    """
-    # An analyzer cannot be run if the spec isn't associated with a package
-    if not hasattr(spec, "package") or not spec.package:
-        tty.die("A spec can only be analyzed with an associated package.")
-
-    # The top level directory is in the user home, or a custom location
-    if not analyzer_dir:
-        analyzer_dir = spack.util.path.canonicalize_path(
-            spack.config.get('config:analyzers_dir', '~/.spack/analyzers'))
-
-    # We follow the same convention as the spec install (this could be better)
-    package_prefix = os.sep.join(spec.package.prefix.split('/')[-3:])
-    meta_dir = os.path.join(analyzer_dir, package_prefix)
-    return meta_dir
-
-
-class AnalyzerBase(object):
-
-    def __init__(self, spec, dirname=None):
-        """
-        Verify that the analyzer has correct metadata.
-
-        An Analyzer is intended to run on one spec install, so the spec
-        with its associated package is required on init. The child analyzer
-        class should define an init function that super's the init here, and
-        also check that the analyzer has all dependencies that it
-        needs. If an analyzer subclass does not have dependencies, it does not
-        need to define an init. An Analyzer should not be allowed to proceed
-        if one or more dependencies are missing. The dirname, if defined,
-        is an optional directory name to save to (instead of the default meta
-        spack directory).
-        """
-        self.spec = spec
-        self.dirname = dirname
-        self.meta_dir = os.path.dirname(spec.package.install_log_path)
-
-        for required in ["name", "outfile", "description"]:
-            if not hasattr(self, required):
-                tty.die("Please add a %s attribute on the analyzer." % required)
-
-    def run(self):
-        """
-        Given a spec with an installed package, run the analyzer on it.
-        """
-        raise NotImplementedError
-
-    @property
-    def output_dir(self):
-        """
-        The full path to the output directory.
-
-        This includes the nested analyzer directory structure. This function
-        does not create anything.
-        """
-        if not hasattr(self, "_output_dir"):
-            output_dir = get_analyzer_dir(self.spec, self.dirname)
-            self._output_dir = os.path.join(output_dir, self.name)
-
-        return self._output_dir
-
-    def save_result(self, result, overwrite=False):
-        """
-        Save a result to the associated spack monitor, if defined.
-
-        This function is on the level of the analyzer because it might be
-        the case that the result is large (appropriate for a single request)
-        or that the data is organized differently (e.g., more than one
-        request per result). If an analyzer subclass needs to over-write
-        this function with a custom save, that is appropriate to do (see abi).
-        """
-        # We maintain the structure in json with the analyzer as key so
-        # that in the future, we could upload to a monitor server
-        if result[self.name]:
-
-            outfile = os.path.join(self.output_dir, self.outfile)
-
-            # Only try to create the results directory if we have a result
-            if not os.path.exists(self._output_dir):
-                os.makedirs(self._output_dir)
-
-            # Don't overwrite an existing result if overwrite is False
-            if os.path.exists(outfile) and not overwrite:
-                tty.info("%s exists and overwrite is False, skipping." % outfile)
-            else:
-                tty.info("Writing result to %s" % outfile)
-                spack.monitor.write_json(result[self.name], outfile)
-
-        # This hook runs after a save result
-        spack.hooks.on_analyzer_save(self.spec.package, result)
@@ -1,33 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""A configargs analyzer is a class of analyzer that typically just uploads
-already existing metadata about config args from a package spec install
-directory."""
-
-
-import os
-
-import spack.monitor
-
-from .analyzer_base import AnalyzerBase
-
-
-class ConfigArgs(AnalyzerBase):
-
-    name = "config_args"
-    outfile = "spack-analyzer-config-args.json"
-    description = "config args loaded from spack-configure-args.txt"
-
-    def run(self):
-        """
-        Load the configure-args.txt and save in json.
-
-        The run function will find the spack-config-args.txt file in the
-        package install directory, and read it into a json structure that has
-        the name of the analyzer as the key.
-        """
-        config_file = os.path.join(self.meta_dir, "spack-configure-args.txt")
-        return {self.name: spack.monitor.read_file(config_file)}
@@ -1,54 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""An environment analyzer will read and parse the environment variables
-file in the installed package directory, generating a json file that has
-an index of key, value pairs for environment variables."""
-
-
-import os
-
-import llnl.util.tty as tty
-
-from spack.util.environment import EnvironmentModifications
-
-from .analyzer_base import AnalyzerBase
-
-
-class EnvironmentVariables(AnalyzerBase):
-
-    name = "environment_variables"
-    outfile = "spack-analyzer-environment-variables.json"
-    description = "environment variables parsed from spack-build-env.txt"
-
-    def run(self):
-        """
-        Load, parse, and save spack-build-env.txt to analyzers.
-
-        Read in the spack-build-env.txt file from the package install
-        directory and parse the environment variables into key value pairs.
-        The result should have the key for the analyzer, the name.
-        """
-        env_file = os.path.join(self.meta_dir, "spack-build-env.txt")
-        return {self.name: self._read_environment_file(env_file)}
-
-    def _read_environment_file(self, filename):
-        """
-        Read and parse the environment file.
-
-        Given an environment file, we want to read it, split by semicolons
-        and new lines, and then parse down to the subset of SPACK_* variables.
-        We assume that all spack prefix variables are not secrets, and unlike
-        the install_manifest.json, we don't (at least to start) parse the values
-        to remove path prefixes specific to user systems.
-        """
-        if not os.path.exists(filename):
-            tty.warn("No environment file available")
-            return
-
-        mods = EnvironmentModifications.from_sourcing_file(filename)
-        env = {}
-        mods.apply_modifications(env)
-        return env
@@ -1,31 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""The install files json file (install_manifest.json) already exists in
-the package install folder, so this analyzer simply moves it to the user
-analyzer folder for further processing."""
-
-
-import os
-
-import spack.monitor
-
-from .analyzer_base import AnalyzerBase
-
-
-class InstallFiles(AnalyzerBase):
-
-    name = "install_files"
-    outfile = "spack-analyzer-install-files.json"
-    description = "install file listing read from install_manifest.json"
-
-    def run(self):
-        """
-        Load in the install_manifest.json and save to analyzers.
-
-        We write it out to the analyzers folder, with key as the analyzer name.
-        """
-        manifest_file = os.path.join(self.meta_dir, "install_manifest.json")
-        return {self.name: spack.monitor.read_json(manifest_file)}
@@ -1,114 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
-
-import llnl.util.tty as tty
-
-import spack
-import spack.binary_distribution
-import spack.bootstrap
-import spack.error
-import spack.hooks
-import spack.monitor
-import spack.package
-import spack.repo
-import spack.util.executable
-
-from .analyzer_base import AnalyzerBase
-
-
-class Libabigail(AnalyzerBase):
-
-    name = "libabigail"
-    outfile = "spack-analyzer-libabigail.json"
-    description = "Application Binary Interface (ABI) features for objects"
-
-    def __init__(self, spec, dirname=None):
-        """
-        init for an analyzer ensures we have all needed dependencies.
-
-        For the libabigail analyzer, this means Libabigail.
-        Since the output for libabigail is one file per object, we communicate
-        with the monitor multiple times.
-        """
-        super(Libabigail, self).__init__(spec, dirname)
-
-        # This doesn't seem to work to import on the module level
-        tty.debug("Preparing to use Libabigail, will install if missing.")
-
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            # libabigail won't install lib/bin/share without docs
-            spec = spack.spec.Spec("libabigail+docs")
-            spack.bootstrap.ensure_executables_in_path_or_raise(
-                ["abidw"], abstract_spec=spec
-            )
-            self.abidw = spack.util.executable.which('abidw')
-
-    def run(self):
-        """
-        Run libabigail, and save results to filename.
-
-        This run function differs in that we write as we generate and then
-        return a dict with the analyzer name as the key, and the value of a
-        dict of results, where the key is the object name, and the value is
-        the output file written to.
-        """
-        manifest = spack.binary_distribution.get_buildfile_manifest(self.spec)
-
-        # This result will store a path to each file
-        result = {}
-
-        # Generate an output file for each binary or object
-        for obj in manifest.get("binary_to_relocate_fullpath", []):
-
-            # We want to preserve the path in the install directory in case
-            # a library has an equivalently named lib or executable, for example
-            outdir = os.path.dirname(obj.replace(self.spec.package.prefix,
-                                     '').strip(os.path.sep))
-            outfile = "spack-analyzer-libabigail-%s.xml" % os.path.basename(obj)
-            outfile = os.path.join(self.output_dir, outdir, outfile)
-            outdir = os.path.dirname(outfile)
-
-            # Create the output directory
-            if not os.path.exists(outdir):
-                os.makedirs(outdir)
-
-            # Sometimes libabigail segfaults and dumps
-            try:
-                self.abidw(obj, "--out-file", outfile)
-                result[obj] = outfile
-                tty.info("Writing result to %s" % outfile)
-            except spack.error.SpackError:
-                tty.warn("Issue running abidw for %s" % obj)
-
-        return {self.name: result}
-
-    def save_result(self, result, overwrite=False):
-        """
-        Read saved ABI results and upload to monitor server.
-
-        ABI results are saved to individual files, so each one needs to be
-        read and uploaded. Result here should be the lookup generated in run(),
-        the key is the analyzer name, and each value is the result file.
-        We currently upload the entire xml as text because libabigail can't
-        easily read gzipped xml, but this will be updated when it can.
-        """
-        if not spack.monitor.cli:
-            return
-
-        name = self.spec.package.name
-
-        for obj, filename in result.get(self.name, {}).items():
-
-            # Don't include the prefix
-            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")
-
-            # We've already saved the results to file during run
-            content = spack.monitor.read_file(filename)
-
-            # A result needs an analyzer, value or binary_value, and name
-            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
-            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
-            spack.hooks.on_analyzer_save(self.spec.package, {"libabigail": [data]})
@@ -276,17 +276,36 @@ def _search_duplicate_specs_in_externals(error_cls):
     )
 
 
+@package_directives
+def _check_build_test_callbacks(pkgs, error_cls):
+    """Ensure stand-alone test method is not included in build-time callbacks"""
+    errors = []
+    for pkg_name in pkgs:
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        test_callbacks = pkg_cls.build_time_test_callbacks
+
+        if test_callbacks and 'test' in test_callbacks:
+            msg = ('{0} package contains "test" method in '
+                   'build_time_test_callbacks')
+            instr = ('Remove "test" from: [{0}]'
+                     .format(', '.join(test_callbacks)))
+            errors.append(error_cls(msg.format(pkg_name), [instr]))
+
+    return errors
+
+
 @package_directives
 def _check_patch_urls(pkgs, error_cls):
     """Ensure that patches fetched from GitHub have stable sha256 hashes."""
     github_patch_url_re = (
-        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
     )
 
     errors = []
     for pkg_name in pkgs:
-        pkg = spack.repo.get(pkg_name)
-        for condition, patches in pkg.patches.items():
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        for condition, patches in pkg_cls.patches.items():
             for patch in patches:
                 if not isinstance(patch, spack.patch.UrlPatch):
                     continue
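A quick standalone check of what the widened pattern accepts; the URLs below are hypothetical examples, not taken from any package:

    import re

    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    # Matched by both the old and the new pattern:
    print(bool(re.match(github_patch_url_re,
                        "https://github.com/owner/repo/commit/abc123.patch")))  # True
    # Only matched by the new pattern (pull-request patch host):
    print(bool(re.match(github_patch_url_re,
                        "https://patch-diff.githubusercontent.com/raw/owner/repo/pull/1.diff")))  # True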
@@ -298,7 +317,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(full_index_arg):
                     errors.append(error_cls(
                         "patch URL in package {0} must end with {1}".format(
-                            pkg.name, full_index_arg,
+                            pkg_cls.name, full_index_arg,
                         ),
                         [patch.url],
                     ))
@@ -312,21 +331,21 @@ def _linting_package_file(pkgs, error_cls):
     """
     errors = []
     for pkg_name in pkgs:
-        pkg = spack.repo.get(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
 
         # Does the homepage have http, and if so, does https work?
-        if pkg.homepage.startswith('http://'):
-            https = re.sub("http", "https", pkg.homepage, 1)
+        if pkg_cls.homepage.startswith('http://'):
+            https = re.sub("http", "https", pkg_cls.homepage, 1)
             try:
                 response = urlopen(https)
             except Exception as e:
                 msg = 'Error with attempting https for "{0}": '
-                errors.append(error_cls(msg.format(pkg.name), [str(e)]))
+                errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
                 continue
 
             if response.getcode() == 200:
                 msg = 'Package "{0}" uses http but has a valid https endpoint.'
-                errors.append(msg.format(pkg.name))
+                errors.append(msg.format(pkg_cls.name))
 
     return llnl.util.lang.dedupe(errors)
@@ -336,10 +355,10 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg = spack.repo.get(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
 
         # Check "conflicts" directive
-        for conflict, triggers in pkg.conflicts.items():
+        for conflict, triggers in pkg_cls.conflicts.items():
             for trigger, _ in triggers:
                 vrn = spack.spec.Spec(conflict)
                 try:
@@ -352,34 +371,34 @@ def _unknown_variants_in_directives(pkgs, error_cls):
                     # When os and target constraints can be created independently of
                     # the platform, TODO change this back to add an error.
                     errors.extend(_analyze_variants_in_directive(
-                        pkg, spack.spec.Spec(trigger),
+                        pkg_cls, spack.spec.Spec(trigger),
                         directive='conflicts', error_cls=error_cls
                     ))
                 errors.extend(_analyze_variants_in_directive(
-                    pkg, vrn, directive='conflicts', error_cls=error_cls
+                    pkg_cls, vrn, directive='conflicts', error_cls=error_cls
                 ))
 
         # Check "depends_on" directive
-        for _, triggers in pkg.dependencies.items():
+        for _, triggers in pkg_cls.dependencies.items():
             triggers = list(triggers)
             for trigger in list(triggers):
                 vrn = spack.spec.Spec(trigger)
                 errors.extend(_analyze_variants_in_directive(
-                    pkg, vrn, directive='depends_on', error_cls=error_cls
+                    pkg_cls, vrn, directive='depends_on', error_cls=error_cls
                 ))
 
         # Check "patch" directive
-        for _, triggers in pkg.provided.items():
+        for _, triggers in pkg_cls.provided.items():
             triggers = [spack.spec.Spec(x) for x in triggers]
             for vrn in triggers:
                 errors.extend(_analyze_variants_in_directive(
-                    pkg, vrn, directive='patch', error_cls=error_cls
+                    pkg_cls, vrn, directive='patch', error_cls=error_cls
                 ))
 
         # Check "resource" directive
-        for vrn in pkg.resources:
+        for vrn in pkg_cls.resources:
             errors.extend(_analyze_variants_in_directive(
-                pkg, vrn, directive='resource', error_cls=error_cls
+                pkg_cls, vrn, directive='resource', error_cls=error_cls
             ))
 
     return llnl.util.lang.dedupe(errors)
@@ -390,15 +409,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg = spack.repo.get(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         filename = spack.repo.path.filename_for_package_name(pkg_name)
-        for dependency_name, dependency_data in pkg.dependencies.items():
+        for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # No need to analyze virtual packages
             if spack.repo.path.is_virtual(dependency_name):
                 continue
 
             try:
-                dependency_pkg = spack.repo.get(dependency_name)
+                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -414,8 +433,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
             dependency_variants = dependency_edge.spec.variants
             for name, value in dependency_variants.items():
                 try:
-                    v, _ = dependency_pkg.variants[name]
-                    v.validate_or_raise(value, pkg=dependency_pkg)
+                    v, _ = dependency_pkg_cls.variants[name]
+                    v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
                 except Exception as e:
                     summary = (pkg_name + ": wrong variant used for a "
                                "dependency in a 'depends_on' directive")
@@ -437,10 +456,10 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg = spack.repo.get(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         filename = spack.repo.path.filename_for_package_name(pkg_name)
         dependencies_to_check = []
-        for dependency_name, dependency_data in pkg.dependencies.items():
+        for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
             if spack.repo.path.is_virtual(dependency_name):
@@ -451,19 +470,19 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
         )
 
     for s in dependencies_to_check:
-        dependency_pkg = None
+        dependency_pkg_cls = None
         try:
-            dependency_pkg = spack.repo.get(s.name)
+            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
             assert any(
-                v.satisfies(s.versions) for v in list(dependency_pkg.versions)
+                v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions)
             )
         except Exception:
             summary = ("{0}: dependency on {1} cannot be satisfied "
                        "by known versions of {1.name}").format(pkg_name, s)
             details = ['happening in ' + filename]
-            if dependency_pkg is not None:
+            if dependency_pkg_cls is not None:
                 details.append('known versions of {0.name} are {1}'.format(
-                    s, ', '.join([str(x) for x in dependency_pkg.versions])
+                    s, ', '.join([str(x) for x in dependency_pkg_cls.versions])
                 ))
             errors.append(error_cls(summary=summary, details=details))
@@ -481,7 +500,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     for name, v in constraint.variants.items():
         try:
             variant, _ = pkg.variants[name]
-            variant.validate_or_raise(v, pkg=pkg)
+            variant.validate_or_raise(v, pkg_cls=pkg)
         except variant_exceptions as e:
             summary = pkg.name + ': wrong variant in "{0}" directive'
             summary = summary.format(directive)
(File diff suppressed because it is too large)
@@ -5,6 +5,7 @@
 from __future__ import print_function
 
 import contextlib
+import copy
 import fnmatch
 import functools
 import json
@@ -21,6 +22,7 @@
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.lang import GroupedExceptionHandler
 
 import spack.binary_distribution
 import spack.config
@@ -36,6 +38,11 @@
 import spack.util.environment
 import spack.util.executable
 import spack.util.path
+import spack.util.spack_yaml
+import spack.util.url
+
+#: Name of the file containing metadata about the bootstrapping source
+METADATA_YAML_FILENAME = 'metadata.yaml'
 
 #: Map a bootstrapper type to the corresponding class
 _bootstrap_methods = {}
@@ -73,32 +80,41 @@ def _try_import_from_store(module, query_spec, query_info=None):
 
     for candidate_spec in installed_specs:
         pkg = candidate_spec['python'].package
-        module_paths = {
+        module_paths = [
             os.path.join(candidate_spec.prefix, pkg.purelib),
             os.path.join(candidate_spec.prefix, pkg.platlib),
-        }
-        sys.path.extend(module_paths)
-
-        try:
-            _fix_ext_suffix(candidate_spec)
-            if _python_import(module):
-                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
-                       'provides the "{0}" Python module').format(
-                    module, query_spec, candidate_spec.dag_hash()
-                )
-                tty.debug(msg)
-                if query_info is not None:
-                    query_info['spec'] = candidate_spec
-                return True
-        except Exception as e:
-            msg = ('unexpected error while trying to import module '
-                   '"{0}" from spec "{1}" [error="{2}"]')
-            tty.warn(msg.format(module, candidate_spec, str(e)))
-        else:
-            msg = "Spec {0} did not provide module {1}"
-            tty.warn(msg.format(candidate_spec, module))
-
-        sys.path = sys.path[:-3]
+        ]  # type: list[str]
+        path_before = list(sys.path)
+        # NOTE: try module_paths first and last, last allows an existing version in path
+        # to be picked up and used, possibly depending on something in the store, first
+        # allows the bootstrap version to work when an incompatible version is in
+        # sys.path
+        orders = [
+            module_paths + sys.path,
+            sys.path + module_paths,
+        ]
+        for path in orders:
+            sys.path = path
+            try:
+                _fix_ext_suffix(candidate_spec)
+                if _python_import(module):
+                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
+                           'provides the "{0}" Python module').format(
+                        module, query_spec, candidate_spec.dag_hash()
+                    )
+                    tty.debug(msg)
+                    if query_info is not None:
+                        query_info['spec'] = candidate_spec
+                    return True
+            except Exception as e:
+                msg = ('unexpected error while trying to import module '
+                       '"{0}" from spec "{1}" [error="{2}"]')
+                tty.warn(msg.format(module, candidate_spec, str(e)))
+            else:
+                msg = "Spec {0} did not provide module {1}"
+                tty.warn(msg.format(candidate_spec, module))
+
+        sys.path = path_before
 
     return False
 
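The intent of the two search orders, as a standalone sketch: the helpers `_fix_ext_suffix` and `_python_import` from the hunk are simplified away, and only the sys.path juggling is shown:

    import sys

    def try_import_with_orders(module, module_paths):
        # Try the bootstrap store paths before sys.path (wins over an
        # incompatible installed version), then after it (lets a compatible
        # installed version win); restore the original path when done.
        path_before = list(sys.path)
        try:
            for path in [module_paths + path_before, path_before + module_paths]:
                sys.path = path
                try:
                    __import__(module)
                    return True
                except ImportError:
                    pass
        finally:
            sys.path = path_before
        return False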
@@ -203,12 +219,43 @@ def _executables_in_store(executables, query_spec, query_info=None):
     return False
 
 
-@_bootstrapper(type='buildcache')
-class _BuildcacheBootstrapper(object):
-    """Install the software needed during bootstrapping from a buildcache."""
+class _BootstrapperBase(object):
+    """Base class to derive types that can bootstrap software for Spack"""
+    config_scope_name = ''
 
     def __init__(self, conf):
         self.name = conf['name']
         self.url = conf['info']['url']
+
+    @property
+    def mirror_url(self):
+        # Absolute paths
+        if os.path.isabs(self.url):
+            return spack.util.url.format(self.url)
+
+        # Check for :// and assume it's an url if we find it
+        if '://' in self.url:
+            return self.url
+
+        # Otherwise, it's a relative path
+        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))
+
+    @property
+    def mirror_scope(self):
+        return spack.config.InternalConfigScope(
+            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
+        )
+
+
+@_bootstrapper(type='buildcache')
+class _BuildcacheBootstrapper(_BootstrapperBase):
+    """Install the software needed during bootstrapping from a buildcache."""
+
+    config_scope_name = 'bootstrap_buildcache'
+
+    def __init__(self, conf):
+        super(_BuildcacheBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.last_search = None
 
     @staticmethod
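How `_BootstrapperBase.mirror_url` resolves the three URL forms, sketched without the `spack.util.url.format` normalization (the paths below are hypothetical):

    import os

    def resolve_mirror_url(url, metadata_dir):
        if os.path.isabs(url):      # absolute path on disk
            return url
        if '://' in url:            # already a full URL
            return url
        return os.path.join(metadata_dir, url)  # relative to the metadata dir

    print(resolve_mirror_url('../binaries', '/opt/spack/bootstrap'))
    # -> /opt/spack/bootstrap/../binaries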
@@ -231,9 +278,8 @@ def _spec_and_platform(abstract_spec_str):
     def _read_metadata(self, package_name):
         """Return metadata about the given package."""
         json_filename = '{0}.json'.format(package_name)
-        json_path = os.path.join(
-            spack.paths.share_path, 'bootstrap', self.name, json_filename
-        )
+        json_dir = self.metadata_dir
+        json_path = os.path.join(json_dir, json_filename)
         with open(json_path) as f:
             data = json.load(f)
         return data
@@ -307,12 +353,6 @@ def _install_and_test(
             return True
         return False
 
-    @property
-    def mirror_scope(self):
-        return spack.config.InternalConfigScope(
-            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
-        )
-
     def try_import(self, module, abstract_spec_str):
         test_fn, info = functools.partial(_try_import_from_store, module), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -342,9 +382,13 @@ def try_search_path(self, executables, abstract_spec_str):
 
 
 @_bootstrapper(type='install')
-class _SourceBootstrapper(object):
+class _SourceBootstrapper(_BootstrapperBase):
     """Install the software needed during bootstrapping from sources."""
+    config_scope_name = 'bootstrap_source'
 
     def __init__(self, conf):
+        super(_SourceBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.conf = conf
         self.last_search = None
|
|||||||
tty.debug(msg.format(module, abstract_spec_str))
|
tty.debug(msg.format(module, abstract_spec_str))
|
||||||
|
|
||||||
# Install the spec that should make the module importable
|
# Install the spec that should make the module importable
|
||||||
concrete_spec.package.do_install(fail_fast=True)
|
with spack.config.override(self.mirror_scope):
|
||||||
|
concrete_spec.package.do_install(fail_fast=True)
|
||||||
|
|
||||||
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
|
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
|
||||||
self.last_search = info
|
self.last_search = info
|
||||||
@@ -390,6 +435,8 @@ def try_search_path(self, executables, abstract_spec_str):
             self.last_search = info
             return True
 
+        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))
+
         # If we compile code from sources detecting a few build tools
         # might reduce compilation time by a fair amount
         _add_externals_if_missing()
@@ -402,7 +449,8 @@ def try_search_path(self, executables, abstract_spec_str):
 
         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
-        concrete_spec.package.do_install()
+        with spack.config.override(self.mirror_scope):
+            concrete_spec.package.do_install()
         if _executables_in_store(executables, concrete_spec, query_info=info):
             self.last_search = info
             return True
@@ -417,11 +465,11 @@ def _make_bootstrapper(conf):
     return _bootstrap_methods[btype](conf)
 
 
-def _source_is_trusted(conf):
+def source_is_enabled_or_raise(conf):
+    """Raise ValueError if the source is not enabled for bootstrapping"""
     trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
-    if name not in trusted:
-        return False
-    return trusted[name]
+    if not trusted.get(name, False):
+        raise ValueError('source is not trusted')
 
 
 def spec_for_current_python():
@@ -486,36 +534,26 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
         return
 
     abstract_spec = abstract_spec or module
-    source_configs = spack.config.get('bootstrap:sources', [])
 
-    errors = {}
+    h = GroupedExceptionHandler()
 
-    for current_config in source_configs:
-        if not _source_is_trusted(current_config):
-            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
-                   'not trusted').format(module, current_config['name'])
-            tty.debug(msg)
-            continue
+    for current_config in bootstrapping_sources():
+        with h.forward(current_config['name']):
+            source_is_enabled_or_raise(current_config)
 
-        b = _make_bootstrapper(current_config)
-        try:
+            b = _make_bootstrapper(current_config)
             if b.try_import(module, abstract_spec):
                 return
-        except Exception as e:
-            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
-            tty.debug(msg.format(module, str(e)))
-            errors[current_config['name']] = e
 
-    # We couldn't import in any way, so raise an import error
-    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
+    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
+    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
     if abstract_spec:
-        msg += ' from spec "{0}"'.format(abstract_spec)
-    msg += ' due to the following failures:\n'
-    for method in errors:
-        err = errors[method]
-        msg += "    '{0}' raised {1}: {2}\n".format(
-            method, err.__class__.__name__, str(err))
-    msg += ' Please run `spack -d spec zlib` for more verbose error messages'
+        msg += 'from spec "{0}" '.format(abstract_spec)
+    if tty.is_debug():
+        msg += h.grouped_message(with_tracebacks=True)
+    else:
+        msg += h.grouped_message(with_tracebacks=False)
+        msg += '\nRun `spack --debug ...` for more detailed errors'
     raise ImportError(msg)
@@ -538,16 +576,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
             return cmd
 
     executables_str = ', '.join(executables)
-    source_configs = spack.config.get('bootstrap:sources', [])
-    for current_config in source_configs:
-        if not _source_is_trusted(current_config):
-            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
-                   'not trusted').format(executables_str, current_config['name'])
-            tty.debug(msg)
-            continue
 
-        b = _make_bootstrapper(current_config)
-        try:
+    h = GroupedExceptionHandler()
+
+    for current_config in bootstrapping_sources():
+        with h.forward(current_config['name']):
+            source_is_enabled_or_raise(current_config)
+
+            b = _make_bootstrapper(current_config)
             if b.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
                 concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -562,14 +598,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                 )
                 cmd.add_default_envmod(env_mods)
                 return cmd
-        except Exception as e:
-            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
-            tty.debug(msg.format(executables_str, str(e)))
 
-    # We couldn't import in any way, so raise an import error
-    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
+    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
+    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
     if abstract_spec:
-        msg += ' from spec "{0}"'.format(abstract_spec)
+        msg += 'from spec "{0}" '.format(abstract_spec)
+    if tty.is_debug():
+        msg += h.grouped_message(with_tracebacks=True)
+    else:
+        msg += h.grouped_message(with_tracebacks=False)
+        msg += '\nRun `spack --debug ...` for more detailed errors'
     raise RuntimeError(msg)
@@ -614,10 +652,10 @@ def _add_compilers_if_missing():
 def _add_externals_if_missing():
     search_list = [
         # clingo
-        spack.repo.path.get('cmake'),
-        spack.repo.path.get('bison'),
+        spack.repo.path.get_pkg_class('cmake'),
+        spack.repo.path.get_pkg_class('bison'),
         # GnuPG
-        spack.repo.path.get('gawk')
+        spack.repo.path.get_pkg_class('gawk')
     ]
     detected_packages = spack.detection.by_executable(search_list)
     spack.detection.update_configuration(detected_packages, scope='bootstrap')
@@ -826,6 +864,19 @@ def ensure_flake8_in_path_or_raise():
     return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
 
 
+def all_root_specs(development=False):
+    """Return a list of all the root specs that may be used to bootstrap Spack.
+
+    Args:
+        development (bool): if True include dev dependencies
+    """
+    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
+    if development:
+        specs += [isort_root_spec(), mypy_root_spec(),
+                  black_root_spec(), flake8_root_spec()]
+    return specs
+
+
 def _missing(name, purpose, system_only=True):
     """Message to be printed if an executable is not found"""
     msg = '[{2}] MISSING "{0}": {1}'
@@ -963,3 +1014,23 @@ def status_message(section):
         msg += '\n'
     msg = msg.format(pass_token if not missing_software else fail_token)
     return msg, missing_software
+
+
+def bootstrapping_sources(scope=None):
+    """Return the list of configured sources of software for bootstrapping Spack
+
+    Args:
+        scope (str or None): if a valid configuration scope is given, return the
+            list only from that scope
+    """
+    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
+    source_configs = source_configs or []
+    list_of_sources = []
+    for entry in source_configs:
+        current = copy.copy(entry)
+        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
+        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
+        with open(metadata_yaml) as f:
+            current.update(spack.util.spack_yaml.load(f))
+        list_of_sources.append(current)
+    return list_of_sources
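The shape of the merge performed by `bootstrapping_sources()`, with a hypothetical config entry and metadata.yaml payload standing in for the real files:

    import copy

    entry = {'name': 'github-actions',
             'metadata': '$spack/share/spack/bootstrap/github-actions'}  # hypothetical
    metadata_yaml = {'type': 'buildcache',
                     'info': {'url': 'https://mirror.example.org/bootstrap'}}  # hypothetical

    current = copy.copy(entry)
    current.update(metadata_yaml)  # keys from metadata.yaml are layered onto the entry
    print(sorted(current))         # -> ['info', 'metadata', 'name', 'type']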
@@ -55,7 +55,7 @@
 import spack.config
 import spack.install_test
 import spack.main
-import spack.package
+import spack.package_base
 import spack.paths
 import spack.platforms
 import spack.repo
@@ -111,6 +111,20 @@
 dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
 
 
+def should_set_parallel_jobs(jobserver_support=False):
+    """Returns true in general, except when:
+    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
+    - jobserver_support is enabled, and a jobserver was found.
+    """
+    if (
+        jobserver_support and
+        'MAKEFLAGS' in os.environ and
+        '--jobserver' in os.environ['MAKEFLAGS']
+    ):
+        return False
+    return not env_flag(SPACK_NO_PARALLEL_MAKE)
+
+
 class MakeExecutable(Executable):
     """Special callable executable object for make so the user can specify
     parallelism options on a per-invocation basis. Specifying
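Behavior sketch for the new helper; `env_flag` is replaced here by a simplified stand-in, and the MAKEFLAGS value is a hypothetical example of what GNU make exports:

    import os

    SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'

    def env_flag(name):  # simplified stand-in for Spack's env-flag helper
        return os.environ.get(name, '').lower() in ('1', 'true', 'yes')

    def should_set_parallel_jobs(jobserver_support=False):
        if (jobserver_support and
                'MAKEFLAGS' in os.environ and
                '--jobserver' in os.environ['MAKEFLAGS']):
            return False
        return not env_flag(SPACK_NO_PARALLEL_MAKE)

    os.environ['MAKEFLAGS'] = '-j4 --jobserver-auth=3,4'
    print(should_set_parallel_jobs(jobserver_support=True))   # False: defer to make's jobserver
    print(should_set_parallel_jobs(jobserver_support=False))  # True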
@@ -120,9 +134,6 @@ class MakeExecutable(Executable):
     call will name an environment variable which will be set to the
     parallelism level (without affecting the normal invocation with
     -j).
-
-    Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
-    everything.
     """
 
     def __init__(self, name, jobs):
@@ -133,9 +144,8 @@ def __call__(self, *args, **kwargs):
         """parallel, and jobs_env from kwargs are swallowed and used here;
         remaining arguments are passed through to the superclass.
         """
-        disable = env_flag(SPACK_NO_PARALLEL_MAKE)
-        parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)
+        parallel = should_set_parallel_jobs(jobserver_support=True) and \
+            kwargs.pop('parallel', self.jobs > 1)
 
         if parallel:
             args = ('-j{0}'.format(self.jobs),) + args
@@ -181,7 +191,7 @@ def clean_environment():
     env.unset('PYTHONPATH')
 
     # Affects GNU make, can e.g. indirectly inhibit enabling parallel build
-    env.unset('MAKEFLAGS')
+    # env.unset('MAKEFLAGS')
 
     # Avoid that libraries of build dependencies get hijacked.
     env.unset('LD_PRELOAD')
@@ -712,7 +722,7 @@ def get_std_cmake_args(pkg):
     package were a CMakePackage instance.
 
     Args:
-        pkg (spack.package.PackageBase): package under consideration
+        pkg (spack.package_base.PackageBase): package under consideration
 
     Returns:
         list: arguments for cmake
@@ -728,7 +738,7 @@ def get_std_meson_args(pkg):
     package were a MesonPackage instance.
 
     Args:
-        pkg (spack.package.PackageBase): package under consideration
+        pkg (spack.package_base.PackageBase): package under consideration
 
     Returns:
         list: arguments for meson
@@ -738,12 +748,12 @@ def get_std_meson_args(pkg):
 
 def parent_class_modules(cls):
     """
-    Get list of superclass modules that descend from spack.package.PackageBase
+    Get list of superclass modules that descend from spack.package_base.PackageBase
 
     Includes cls.__module__
     """
-    if (not issubclass(cls, spack.package.PackageBase) or
-            issubclass(spack.package.PackageBase, cls)):
+    if (not issubclass(cls, spack.package_base.PackageBase) or
+            issubclass(spack.package_base.PackageBase, cls)):
         return []
     result = []
     module = sys.modules.get(cls.__module__)
@@ -761,7 +771,7 @@ def load_external_modules(pkg):
     associated with them.
 
     Args:
-        pkg (spack.package.PackageBase): package to load deps for
+        pkg (spack.package_base.PackageBase): package to load deps for
     """
     for dep in list(pkg.spec.traverse()):
         external_modules = dep.external_modules or []
@@ -829,7 +839,7 @@ def setup_package(pkg, dirty, context='build'):
|
|||||||
# PrgEnv modules on cray platform. Module unload does no damage when
|
# PrgEnv modules on cray platform. Module unload does no damage when
|
||||||
# unnecessary
|
# unnecessary
|
||||||
on_cray, _ = _on_cray()
|
on_cray, _ = _on_cray()
|
||||||
if on_cray:
|
if on_cray and not dirty:
|
||||||
for mod in ['cray-mpich', 'cray-libsci']:
|
for mod in ['cray-mpich', 'cray-libsci']:
|
||||||
module('unload', mod)
|
module('unload', mod)
|
||||||
|
|
||||||
@@ -1028,7 +1038,7 @@ def get_cmake_prefix_path(pkg):


 def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
-                       input_multiprocess_fd):
+                       input_multiprocess_fd, jsfd1, jsfd2):

     context = kwargs.get('context', 'build')

@@ -1099,7 +1109,7 @@ def start_build_process(pkg, function, kwargs):

     Args:

-        pkg (spack.package.PackageBase): package whose environment we should set up the
+        pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
         function (typing.Callable): argless function to run in the child
             process.
@@ -1135,6 +1145,8 @@ def child_fun():
     """
     parent_pipe, child_pipe = multiprocessing.Pipe()
     input_multiprocess_fd = None
+    jobserver_fd1 = None
+    jobserver_fd2 = None

     serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)

@@ -1144,11 +1156,17 @@ def child_fun():
                 'fileno'):
             input_fd = os.dup(sys.stdin.fileno())
             input_multiprocess_fd = MultiProcessFd(input_fd)
+        mflags = os.environ.get('MAKEFLAGS', False)
+        if mflags:
+            m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
+            if m:
+                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
+                jobserver_fd2 = MultiProcessFd(int(m.group(2)))

     p = multiprocessing.Process(
         target=_setup_pkg_and_run,
         args=(serialized_pkg, function, kwargs, child_pipe,
-              input_multiprocess_fd))
+              input_multiprocess_fd, jobserver_fd1, jobserver_fd2))

     p.start()
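The `re.search` above recovers the two jobserver pipe file descriptors that GNU make advertises to sub-makes, so they can be handed to the build child process. A standalone illustration (the sample MAKEFLAGS values are hypothetical; the `[^=]*` in the pattern covers both the newer `--jobserver-auth` and the older `--jobserver-fds` spellings):

```python
import re

# MAKEFLAGS as GNU make might export it to sub-processes (made-up samples):
samples = [
    '-j --jobserver-auth=3,4',  # GNU make >= 4.2
    '-j --jobserver-fds=3,4',   # older spelling of the same option
]

for mflags in samples:
    m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
    if m:
        # The two groups are the read and write ends of the jobserver
        # pipe, inherited by children as plain file descriptors.
        print(int(m.group(1)), int(m.group(2)))  # prints: 3 4
```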
@@ -1216,7 +1234,7 @@ def make_stack(tb, stack=None):
         if 'self' in frame.f_locals:
             # Find the first proper subclass of PackageBase.
             obj = frame.f_locals['self']
-            if isinstance(obj, spack.package.PackageBase):
+            if isinstance(obj, spack.package_base.PackageBase):
                 break

     # We found obj, the Package implementation we care about.
@@ -9,7 +9,7 @@
 from spack.build_systems.autotools import AutotoolsPackage
 from spack.directives import extends
-from spack.package import ExtensionError
+from spack.package_base import ExtensionError
 from spack.util.executable import which


@@ -16,7 +16,7 @@
 from spack.build_environment import InstallError
 from spack.directives import conflicts, depends_on
 from spack.operating_systems.mac_os import macos_version
-from spack.package import PackageBase, run_after, run_before
+from spack.package_base import PackageBase, run_after, run_before
 from spack.util.executable import Executable
 from spack.version import Version

@@ -8,7 +8,7 @@
 from llnl.util.filesystem import install, mkdirp

 from spack.build_systems.cmake import CMakePackage
-from spack.package import run_after
+from spack.package_base import run_after


 def cmake_cache_path(name, value, comment=""):
@@ -210,6 +210,10 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]

+    def initconfig_package_entries(self):
+        """This method is to be overwritten by the package"""
+        return []
+
     def initconfig(self, spec, prefix):
         cache_entries = (self.std_initconfig_entries() +
                          self.initconfig_compiler_entries() +
@@ -18,7 +18,7 @@

 import spack.build_environment
 from spack.directives import conflicts, depends_on, variant
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
 from spack.util.path import convert_to_posix_path

 # Regex to extract the primary generator from the CMake generator
@@ -176,6 +176,7 @@ def _std_args(pkg):
             '-G', generator,
             define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
             define('CMAKE_BUILD_TYPE', build_type),
+            define('BUILD_TESTING', pkg.run_tests),
         ]

         # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
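The new `define('BUILD_TESTING', pkg.run_tests)` line forwards Spack's `--test` setting to CMake's conventional `BUILD_TESTING` switch. For orientation, a rough sketch of how a `define`-style helper turns Python values into `-D` flags (illustrative only; Spack's real helper also handles strings and list values):

```python
def define(cmake_var, value):
    # Booleans map onto CMake's ON/OFF; other values are stringified.
    # (Sketch: the real helper also joins list values with semicolons.)
    if isinstance(value, bool):
        return '-D{0}:BOOL={1}'.format(cmake_var, 'ON' if value else 'OFF')
    return '-D{0}={1}'.format(cmake_var, value)


print(define('BUILD_TESTING', False))   # -> -DBUILD_TESTING:BOOL=OFF
print(define('CMAKE_BUILD_TYPE', 'RelWithDebInfo'))
```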
@@ -361,6 +362,7 @@ def cmake_args(self):

         * CMAKE_INSTALL_PREFIX
         * CMAKE_BUILD_TYPE
+        * BUILD_TESTING

         which will be set automatically.

@@ -6,7 +6,7 @@
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.multimethod import when
-from spack.package import PackageBase
+from spack.package_base import PackageBase


 class CudaPackage(PackageBase):
@@ -37,6 +37,7 @@ class CudaPackage(PackageBase):
     variant('cuda_arch',
             description='CUDA architecture',
             values=spack.variant.any_combination_of(*cuda_arch_values),
+            sticky=True,
             when='+cuda')

     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
@@ -107,10 +108,10 @@ def cuda_flags(arch_list):
     # each release of a new cuda minor version.
     conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
     conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
-    conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
+    conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
     conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
     conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
-    conflicts('%clang@14:', when='+cuda ^cuda@:11.6')
+    conflicts('%clang@14:', when='+cuda ^cuda@:11.7')

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
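Each of these lines reads as "this compiler range cannot build a `+cuda` package against that CUDA range"; the updates simply extend the known-bad ranges through CUDA 11.7. A hedged sketch of the same directive in a hypothetical package:

```python
from spack.build_systems.cuda import CudaPackage
from spack.directives import conflicts


class MySolver(CudaPackage):  # hypothetical package, for illustration
    """Example of inheriting the shared CUDA variant machinery."""

    # Reject concretizations pairing gcc 12+ with CUDA 11.7 or older;
    # the when= clause scopes the conflict to +cuda builds only.
    conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
```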
@@ -3,14 +3,16 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class GNUMirrorPackage(spack.package.PackageBase):
+class GNUMirrorPackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for GNU packages."""
     #: Path of the package in a GNU mirror
-    gnu_mirror_path = None
+    gnu_mirror_path = None  # type: Optional[str]

     #: List of GNU mirrors used by Spack
     base_mirrors = [
@@ -26,7 +26,7 @@

 import spack.error
 from spack.build_environment import dso_suffix
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ def _setup_dependent_env_callback(
             raise InstallError('compilers_of_client arg required for MPI')

     def setup_dependent_package(self, module, dep_spec):
-        # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
+        # https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
         # Reminder: "module" refers to Python module.
         # Called before the install() method of dependents.

@@ -1259,6 +1259,14 @@ def install(self, spec, prefix):
             for f in glob.glob('%s/intel*log' % tmpdir):
                 install(f, dst)

+    @run_after('install')
+    def validate_install(self):
+        # Sometimes the installer exits with an error but doesn't pass a
+        # non-zero exit code to spack. Check for the existence of a 'bin'
+        # directory to catch this error condition.
+        if not os.path.exists(self.prefix.bin):
+            raise InstallError('The installer has failed to install anything.')
+
     @run_after('install')
     def configure_rpath(self):
         if '+rpath' not in self.spec:

lib/spack/spack/build_systems/lua.py (new file, 102 lines)
@@ -0,0 +1,102 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+import os
+
+from llnl.util.filesystem import find
+
+from spack.directives import depends_on, extends
+from spack.multimethod import when
+from spack.package_base import PackageBase
+from spack.util.executable import Executable
+
+
+class LuaPackage(PackageBase):
+    """Specialized class for lua packages"""
+
+    phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = 'LuaPackage'
+
+    list_depth = 1  # LuaRocks requires at least one level of spidering to find versions
+    depends_on('lua-lang')
+    extends('lua', when='^lua')
+    with when('^lua-luajit'):
+        extends('lua-luajit')
+        depends_on('luajit')
+        depends_on('lua-luajit+lualinks')
+    with when('^lua-luajit-openresty'):
+        extends('lua-luajit-openresty')
+        depends_on('luajit')
+        depends_on('lua-luajit-openresty+lualinks')
+
+    def unpack(self, spec, prefix):
+        if os.path.splitext(self.stage.archive_file)[1] == '.rock':
+            directory = self.luarocks('unpack', self.stage.archive_file, output=str)
+            dirlines = directory.split('\n')
+            # TODO: figure out how to scope this better
+            os.chdir(dirlines[2])
+
+    def _generate_tree_line(self, name, prefix):
+        return """{{ name = "{name}", root = "{prefix}" }};""".format(
+            name=name,
+            prefix=prefix,
+        )
+
+    def _luarocks_config_path(self):
+        return os.path.join(self.stage.source_path, 'spack_luarocks.lua')
+
+    def generate_luarocks_config(self, spec, prefix):
+        spec = self.spec
+        table_entries = []
+        for d in spec.traverse(
+            deptypes=("build", "run"), deptype_query="run"
+        ):
+            if d.package.extends(self.extendee_spec):
+                table_entries.append(self._generate_tree_line(d.name, d.prefix))
+
+        path = self._luarocks_config_path()
+        with open(path, 'w') as config:
+            config.write(
+                """
+                deps_mode="all"
+                rocks_trees={{
+                    {}
+                }}
+                """.format(
+                    "\n".join(table_entries)
+                )
+            )
+        return path
+
+    def setup_build_environment(self, env):
+        env.set('LUAROCKS_CONFIG', self._luarocks_config_path())
+
+    def preprocess(self, spec, prefix):
+        """Override this to preprocess source before building with luarocks"""
+        pass
+
+    @property
+    def lua(self):
+        return Executable(self.spec['lua-lang'].prefix.bin.lua)
+
+    @property
+    def luarocks(self):
+        lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
+        return lr
+
+    def luarocks_args(self):
+        return []
+
+    def install(self, spec, prefix):
+        rock = '.'
+        specs = find('.', '*.rockspec', recursive=False)
+        if specs:
+            rock = specs[0]
+        rocks_args = self.luarocks_args()
+        rocks_args.append(rock)
+        self.luarocks('--tree=' + prefix, 'make', *rocks_args)
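To make the new build system concrete, here is a hedged sketch of what a package using `LuaPackage` could look like (the package name, URL, and checksum are made up for illustration):

```python
from spack.build_systems.lua import LuaPackage
from spack.directives import version


class LuaExample(LuaPackage):  # hypothetical package
    """Illustrative rock built through the new LuaPackage phases:
    unpack -> generate_luarocks_config -> preprocess -> install."""

    homepage = 'https://example.org/lua-example'          # made up
    url = 'https://example.org/lua-example-1.0.src.rock'  # made up

    version('1.0', sha256='0' * 64)  # placeholder checksum

    def luarocks_args(self):
        # Extra flags handed to `luarocks make`, appended before the rockspec.
        return ['CFLAGS=-O2']
```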
@@ -11,7 +11,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import conflicts
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class MakefilePackage(PackageBase):
@@ -7,7 +7,7 @@
 from llnl.util.filesystem import install_tree, working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
 from spack.util.executable import which


@@ -11,7 +11,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on, variant
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class MesonPackage(PackageBase):
@@ -6,7 +6,7 @@
 import inspect

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class OctavePackage(PackageBase):
@@ -14,7 +14,7 @@

 from llnl.util.filesystem import find_headers, find_libraries, join_path

-from spack.package import Package
+from spack.package_base import Package
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable

@@ -30,24 +30,31 @@ class IntelOneApiPackage(Package):
     # organization (e.g. University/Company).
     redistribute_source = False

+    @staticmethod
+    def update_description(cls):
+        """Updates oneapi package descriptions with common text."""
+
+        text = """ LICENSE INFORMATION: By downloading and using this software, you agree to the terms
+    and conditions of the software license agreements at https://intel.ly/393CijO."""
+        cls.__doc__ = cls.__doc__ + text
+        return cls
+
     @property
     def component_dir(self):
         """Subdirectory for this component in the install prefix."""
         raise NotImplementedError

     @property
-    def component_path(self):
+    def component_prefix(self):
         """Path to component <prefix>/<component>/<version>."""
-        return join_path(self.prefix, self.component_dir, str(self.spec.version))
+        return self.prefix.join(join_path(self.component_dir, self.spec.version))

-    def install(self, spec, prefix, installer_path=None):
+    def install(self, spec, prefix):
+        self.install_component(basename(self.url_for_version(spec.version)))
+
+    def install_component(self, installer_path):
         """Shared install method for all oneapi packages."""

-        # intel-oneapi-compilers overrides the installer_path when
-        # installing fortran, which comes from a spack resource
-        if installer_path is None:
-            installer_path = basename(self.url_for_version(spec.version))
-
         if platform.system() == 'Linux':
             # Intel installer assumes and enforces that all components
             # are installed into a single prefix. Spack wants to
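`update_description` is written to be applied as a class decorator: it mutates `cls.__doc__` and returns the class. A hedged usage sketch (the package name is hypothetical; the decorator itself is from the diff above):

```python
from spack.build_systems.oneapi import IntelOneApiLibraryPackage


@IntelOneApiLibraryPackage.update_description
class IntelOneapiExample(IntelOneApiLibraryPackage):  # hypothetical
    """Example oneAPI component package."""
    # After class creation, __doc__ ends with the shared
    # "LICENSE INFORMATION: ..." paragraph appended by the decorator.
```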
@@ -68,7 +75,7 @@ def install(self, spec, prefix, installer_path=None):
             bash = Executable('bash')

             # Installer writes files in ~/intel set HOME so it goes to prefix
-            bash.add_default_env('HOME', prefix)
+            bash.add_default_env('HOME', self.prefix)
             # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
             bash.add_default_env('XDG_RUNTIME_DIR',
                                  join_path(self.stage.path, 'runtime'))
@@ -76,13 +83,13 @@ def install(self, spec, prefix, installer_path=None):
             bash(installer_path,
                  '-s', '-a', '-s', '--action', 'install',
                  '--eula', 'accept',
-                 '--install-dir', prefix)
+                 '--install-dir', self.prefix)

             if getpass.getuser() == 'root':
                 shutil.rmtree('/var/intel/installercache', ignore_errors=True)

         # Some installers have a bug and do not return an error code when failing
-        if not isdir(join_path(prefix, self.component_dir)):
+        if not isdir(join_path(self.prefix, self.component_dir)):
             raise RuntimeError('install failed')

     def setup_run_environment(self, env):
@@ -95,7 +102,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         env.extend(EnvironmentModifications.from_sourcing_file(
-            join_path(self.component_path, 'env', 'vars.sh')))
+            join_path(self.component_prefix, 'env', 'vars.sh')))


 class IntelOneApiLibraryPackage(IntelOneApiPackage):
@@ -109,12 +116,12 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):

     @property
     def headers(self):
-        include_path = join_path(self.component_path, 'include')
+        include_path = join_path(self.component_prefix, 'include')
         return find_headers('*', include_path, recursive=True)

     @property
     def libs(self):
-        lib_path = join_path(self.component_path, 'lib', 'intel64')
+        lib_path = join_path(self.component_prefix, 'lib', 'intel64')
         lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
         return find_libraries('*', root=lib_path, shared=True, recursive=True)

@@ -10,7 +10,7 @@
 from llnl.util.filesystem import filter_file

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
 from spack.util.executable import Executable


@@ -6,26 +6,30 @@
 import os
 import re
 import shutil
+from typing import Optional

 import llnl.util.tty as tty
 from llnl.util.filesystem import (
     filter_file,
     find,
+    find_all_headers,
+    find_libraries,
     is_nonsymlink_exe_with_shebang,
     path_contains_subdirectory,
     same_path,
     working_dir,
 )
-from llnl.util.lang import match_predicate
+from llnl.util.lang import classproperty, match_predicate

 from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.error import NoHeadersError, NoLibrariesError
+from spack.package_base import PackageBase, run_after


 class PythonPackage(PackageBase):
     """Specialized class for packages that are built using pip."""
     #: Package name, version, and extension on PyPI
-    pypi = None
+    pypi = None  # type: Optional[str]

     maintainers = ['adamjstewart']

@@ -46,7 +50,7 @@ class PythonPackage(PackageBase):
     # package manually
     depends_on('py-wheel', type='build')

-    py_namespace = None
+    py_namespace = None  # type: Optional[str]

     @staticmethod
     def _std_args(cls):
@@ -73,24 +77,21 @@ def _std_args(cls):
             '--no-index',
         ]

-    @property
-    def homepage(self):
-        if self.pypi:
-            name = self.pypi.split('/')[0]
+    @classproperty
+    def homepage(cls):
+        if cls.pypi:
+            name = cls.pypi.split('/')[0]
             return 'https://pypi.org/project/' + name + '/'

-    @property
-    def url(self):
-        if self.pypi:
-            return (
-                'https://files.pythonhosted.org/packages/source/'
-                + self.pypi[0] + '/' + self.pypi
-            )
+    @classproperty
+    def url(cls):
+        if cls.pypi:
+            return 'https://files.pythonhosted.org/packages/source/' + cls.pypi[0] + '/' + cls.pypi

-    @property
-    def list_url(self):
-        if self.pypi:
-            name = self.pypi.split('/')[0]
+    @classproperty
+    def list_url(cls):
+        if cls.pypi:
+            name = cls.pypi.split('/')[0]
             return 'https://pypi.org/simple/' + name + '/'

     @property
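Switching `@property` to `@classproperty` lets Spack compute `homepage`, `url`, and `list_url` from `cls.pypi` without instantiating the package, which URL-guessing code paths need. A minimal sketch of such a descriptor (illustrative; Spack ships its own in `llnl.util.lang`):

```python
class classproperty(object):
    """Non-data descriptor: like @property, but invoked on the class."""

    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        # 'owner' is the class itself, so access works with no instance.
        return self.callback(owner)


class Demo(object):
    pypi = 'demo/demo-1.0.tar.gz'  # hypothetical PyPI path

    @classproperty
    def homepage(cls):
        return 'https://pypi.org/project/' + cls.pypi.split('/')[0] + '/'


print(Demo.homepage)  # works on the class: https://pypi.org/project/demo/
```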
@@ -177,6 +178,37 @@ def install(self, spec, prefix):
         with working_dir(self.build_directory):
             pip(*args)

+    @property
+    def headers(self):
+        """Discover header files in platlib."""
+
+        # Headers may be in either location
+        include = inspect.getmodule(self).include
+        platlib = inspect.getmodule(self).platlib
+        headers = find_all_headers(include) + find_all_headers(platlib)
+
+        if headers:
+            return headers
+
+        msg = 'Unable to locate {} headers in {} or {}'
+        raise NoHeadersError(msg.format(self.spec.name, include, platlib))
+
+    @property
+    def libs(self):
+        """Discover libraries in platlib."""
+
+        # Remove py- prefix in package name
+        library = 'lib' + self.spec.name[3:].replace('-', '?')
+        root = inspect.getmodule(self).platlib
+
+        for shared in [True, False]:
+            libs = find_libraries(library, root, shared=shared, recursive=True)
+            if libs:
+                return libs
+
+        msg = 'Unable to recursively locate {} libraries in {}'
+        raise NoLibrariesError(msg.format(self.spec.name, root))
+
     # Testing

     def test(self):
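The `?` substitution is doing real work here: `find_libraries` takes fnmatch-style patterns, where `?` matches any single character. A hedged illustration (package name hypothetical):

```python
# For a package named 'py-foo-bar':
name = 'py-foo-bar'
library = 'lib' + name[3:].replace('-', '?')   # -> 'libfoo?bar'

# As an fnmatch-style pattern, 'libfoo?bar' matches any of:
#   libfoo-bar.so, libfoo_bar.so, libfoo.bar.so
# so the search tolerates whatever separator the project compiled into
# its extension module names.
print(library)
```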
@@ -9,7 +9,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class QMakePackage(PackageBase):
@@ -2,12 +2,13 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import inspect
+from typing import Optional

+import llnl.util.lang as lang

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class RPackage(PackageBase):
@@ -28,10 +29,10 @@ class RPackage(PackageBase):
     # package attributes that can be expanded to set the homepage, url,
     # list_url, and git values
     # For CRAN packages
-    cran = None
+    cran = None  # type: Optional[str]

     # For Bioconductor packages
-    bioc = None
+    bioc = None  # type: Optional[str]

     maintainers = ['glennpj']

@@ -41,27 +42,27 @@ class RPackage(PackageBase):

     extends('r')

-    @property
-    def homepage(self):
-        if self.cran:
-            return 'https://cloud.r-project.org/package=' + self.cran
-        elif self.bioc:
-            return 'https://bioconductor.org/packages/' + self.bioc
+    @lang.classproperty
+    def homepage(cls):
+        if cls.cran:
+            return 'https://cloud.r-project.org/package=' + cls.cran
+        elif cls.bioc:
+            return 'https://bioconductor.org/packages/' + cls.bioc

-    @property
-    def url(self):
-        if self.cran:
+    @lang.classproperty
+    def url(cls):
+        if cls.cran:
             return (
                 'https://cloud.r-project.org/src/contrib/'
-                + self.cran + '_' + str(list(self.versions)[0]) + '.tar.gz'
+                + cls.cran + '_' + str(list(cls.versions)[0]) + '.tar.gz'
             )

-    @property
-    def list_url(self):
-        if self.cran:
+    @lang.classproperty
+    def list_url(cls):
+        if cls.cran:
             return (
                 'https://cloud.r-project.org/src/contrib/Archive/'
-                + self.cran + '/'
+                + cls.cran + '/'
             )

     @property
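The same class-level URL derivation, seen from the package author's side. A hedged sketch with a hypothetical CRAN package:

```python
from spack.build_systems.r import RPackage
from spack.directives import version


class RExample(RPackage):  # hypothetical CRAN package
    """With cran set, homepage/url/list_url are derived class properties."""

    cran = 'example'  # made-up CRAN name

    version('1.0.0', sha256='0' * 64)  # placeholder checksum

# Derived without instantiating the class:
#   RExample.homepage -> https://cloud.r-project.org/package=example
#   RExample.url      -> https://cloud.r-project.org/src/contrib/example_1.0.0.tar.gz
#   RExample.list_url -> https://cloud.r-project.org/src/contrib/Archive/example/
```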
@@ -3,13 +3,15 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+from typing import Optional

+import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

 from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
 from spack.directives import extends
-from spack.package import PackageBase
+from spack.package_base import PackageBase
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError

@@ -36,14 +38,14 @@ class RacketPackage(PackageBase):
     extends('racket')

     pkgs = False
-    subdirectory = None
-    name = None
+    subdirectory = None  # type: Optional[str]
+    name = None  # type: Optional[str]
     parallel = True

-    @property
-    def homepage(self):
-        if self.pkgs:
-            return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)
+    @lang.classproperty
+    def homepage(cls):
+        if cls.pkgs:
+            return 'https://pkgs.racket-lang.org/package/{0}'.format(cls.name)

     @property
     def build_directory(self):
@@ -77,7 +77,7 @@

 import spack.variant
 from spack.directives import conflicts, depends_on, variant
-from spack.package import PackageBase
+from spack.package_base import PackageBase


 class ROCmPackage(PackageBase):
@@ -90,9 +90,10 @@ class ROCmPackage(PackageBase):
     # https://llvm.org/docs/AMDGPUUsage.html
     # Possible architectures
     amdgpu_targets = (
-        'gfx701', 'gfx801', 'gfx802', 'gfx803',
-        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
-        'gfx1011', 'gfx1012'
+        'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx900:xnack-',
+        'gfx906', 'gfx908', 'gfx90a',
+        'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-', 'gfx90a:xnack+',
+        'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030', 'gfx1031',
     )

     variant('rocm', default=False, description='Enable ROCm support')
@@ -7,7 +7,7 @@
 import inspect

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class RubyPackage(PackageBase):
@@ -7,7 +7,7 @@
 import inspect

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class SConsPackage(PackageBase):
@@ -11,7 +11,7 @@
 from llnl.util.filesystem import find, join_path, working_dir

 from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class SIPPackage(PackageBase):
@@ -3,15 +3,17 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class SourceforgePackage(spack.package.PackageBase):
+class SourceforgePackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for Sourceforge
        packages."""
     #: Path of the package in a Sourceforge mirror
-    sourceforge_mirror_path = None
+    sourceforge_mirror_path = None  # type: Optional[str]

     #: List of Sourceforge mirrors used by Spack
     base_mirrors = [
@@ -2,16 +2,17 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Optional

-import spack.package
+import spack.package_base
 import spack.util.url


-class SourcewarePackage(spack.package.PackageBase):
+class SourcewarePackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for Sourceware.org
        packages."""
     #: Path of the package in a Sourceware mirror
-    sourceware_mirror_path = None
+    sourceware_mirror_path = None  # type: Optional[str]

     #: List of Sourceware mirrors used by Spack
     base_mirrors = [
@@ -9,7 +9,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class WafPackage(PackageBase):
@@ -3,15 +3,17 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class XorgPackage(spack.package.PackageBase):
+class XorgPackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for x.org
        packages."""
     #: Path of the package in a x.org mirror
-    xorg_mirror_path = None
+    xorg_mirror_path = None  # type: Optional[str]

     #: List of x.org mirrors used by Spack
     # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
@@ -33,7 +33,6 @@
 import spack.util.executable as exe
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
-import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.error import SpackError
 from spack.spec import Spec
@@ -42,10 +41,8 @@
     'always',
 ]

-SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
-SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
-                                           'shared_pr_mirror')
 TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
+SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

 spack_gpg = spack.main.SpackCommand('gpg')
 spack_compiler = spack.main.SpackCommand('compiler')
@@ -90,8 +87,8 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
     return build_group_id


-def populate_buildgroup(job_names, group_name, project, site,
-                        credentials, cdash_url):
+def _populate_buildgroup(job_names, group_name, project, site,
+                         credentials, cdash_url):
     url = "{0}/api/v1/buildgroup.php".format(cdash_url)

     headers = {
@@ -132,16 +129,30 @@ def populate_buildgroup(job_names, group_name, project, site,
     response_code = response.getcode()

     if response_code != 200:
-        msg = 'Error response code ({0}) in populate_buildgroup'.format(
+        msg = 'Error response code ({0}) in _populate_buildgroup'.format(
             response_code)
         tty.warn(msg)


-def is_main_phase(phase_name):
+def _is_main_phase(phase_name):
     return True if phase_name == 'specs' else False


 def get_job_name(phase, strip_compiler, spec, osarch, build_group):
+    """ Given the necessary parts, format the gitlab job name
+
+    Arguments:
+        phase (str): Either 'specs' for the main phase, or the name of a
+            bootstrapping phase
+        strip_compiler (bool): Should compiler be stripped from job name
+        spec (spack.spec.Spec): Spec job will build
+        osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
+            but sphinx doesn't recognize the type and fails).
+        build_group (str): Name of build group this job belongs to (a CDash
+            notion)
+
+    Returns: The job name
+    """
    item_idx = 0
     format_str = ''
     format_args = []
@@ -163,7 +174,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
         format_args.append(spec.version)
         item_idx += 1

-    if is_main_phase(phase) is True or strip_compiler is False:
+    if _is_main_phase(phase) is True or strip_compiler is False:
         format_str += ' {{{0}}}'.format(item_idx)
         format_args.append(spec.compiler)
         item_idx += 1
@@ -180,12 +191,17 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
     return format_str.format(*format_args)


-def get_cdash_build_name(spec, build_group):
+def _get_cdash_build_name(spec, build_group):
     return '{0}@{1}%{2} arch={3} ({4})'.format(
         spec.name, spec.version, spec.compiler, spec.architecture, build_group)


-def get_spec_string(spec):
+def _remove_reserved_tags(tags):
+    """Convenience function to strip reserved tags from jobs"""
+    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
+
+
+def _get_spec_string(spec):
     format_elements = [
         '{name}{@version}',
         '{%compiler}',
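`SPACK_RESERVED_TAGS` and `_remove_reserved_tags` keep user-supplied runner tags out of the reserved pipeline classes (for example, so a job cannot tag itself onto a "notary" signing runner; that reading is an inference from the names, not stated in the diff). The filter itself is simple:

```python
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]


def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


# Hypothetical tags from a gitlab-ci runner-attributes mapping:
print(_remove_reserved_tags(['large', 'notary', 'x86_64']))
# -> ['large', 'x86_64']
```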
@@ -197,15 +213,15 @@ def get_spec_string(spec):
     return spec.format(''.join(format_elements))


-def format_root_spec(spec, main_phase, strip_compiler):
+def _format_root_spec(spec, main_phase, strip_compiler):
     if main_phase is False and strip_compiler is True:
         return '{0}@{1} arch={2}'.format(
             spec.name, spec.version, spec.architecture)
     else:
-        return spec.build_hash()
+        return spec.dag_hash()


-def spec_deps_key(s):
+def _spec_deps_key(s):
     return '{0}/{1}'.format(s.name, s.dag_hash(7))


@@ -217,8 +233,10 @@ def _add_dependency(spec_label, dep_label, deps):
     deps[spec_label].add(dep_label)


-def get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
-    spec_deps_obj = compute_spec_deps(specs, check_index_only=check_index_only)
+def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
+                           mirrors_to_check=None):
+    spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
+                                       mirrors_to_check=mirrors_to_check)

     if spec_deps_obj:
         dependencies = spec_deps_obj['dependencies']
@@ -235,7 +253,7 @@ def get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
         _add_dependency(entry['spec'], entry['depends'], deps)


-def stage_spec_jobs(specs, check_index_only=False):
+def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
     """Take a set of release specs and generate a list of "stages", where the
     jobs in any stage are dependent only on jobs in previous stages. This
     allows us to maximize build parallelism within the gitlab-ci framework.
@@ -247,6 +265,8 @@ def stage_spec_jobs(specs, check_index_only=False):
             are up to date on those mirrors. This flag limits that search to
             the binary cache indices on those mirrors to speed the process up,
             even though there is no guarantee the index is up to date.
+        mirrors_to_check: Optional mapping giving mirrors to check instead of
+            any configured mirrors.

     Returns: A tuple of information objects describing the specs, dependencies
         and stages:
@@ -266,11 +286,11 @@ def stage_spec_jobs(specs, check_index_only=False):

     """

-    # The convenience method below, "remove_satisfied_deps()", does not modify
+    # The convenience method below, "_remove_satisfied_deps()", does not modify
     # the "deps" parameter.  Instead, it returns a new dictionary where only
     # dependencies which have not yet been satisfied are included in the
     # return value.
-    def remove_satisfied_deps(deps, satisfied_list):
+    def _remove_satisfied_deps(deps, satisfied_list):
         new_deps = {}

         for key, value in iteritems(deps):
@@ -283,8 +303,8 @@ def remove_satisfied_deps(deps, satisfied_list):
     deps = {}
     spec_labels = {}

-    get_spec_dependencies(
-        specs, deps, spec_labels, check_index_only=check_index_only)
+    _get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
+                           mirrors_to_check=mirrors_to_check)

     # Save the original deps, as we need to return them at the end of the
     # function.  In the while loop below, the "dependencies" variable is
@@ -302,7 +322,7 @@ def remove_satisfied_deps(deps, satisfied_list):
         # Note that "dependencies" is a dictionary mapping each dependent
         # package to the set of not-yet-handled dependencies.  The final step
         # below removes all the dependencies that are handled by this stage.
-        dependencies = remove_satisfied_deps(dependencies, next_stage)
+        dependencies = _remove_satisfied_deps(dependencies, next_stage)

         if unstaged:
             stages.append(unstaged.copy())
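The staging loop is a batched topological sort: every spec whose remaining dependencies are all satisfied joins the next stage, then those are removed and the loop repeats. A self-contained sketch of that core (the data is hypothetical; the real code also tracks spec labels and rebuild status):

```python
def stage_jobs(deps):
    """deps maps each job to the set of jobs it still depends on."""
    unstaged = set(deps) | {d for ds in deps.values() for d in ds}
    stages = []
    while unstaged:
        # Everything with no unsatisfied dependencies can run now.
        next_stage = {j for j in unstaged if not deps.get(j)}
        stages.append(sorted(next_stage))
        unstaged -= next_stage
        # Drop satisfied dependencies; keep only what still blocks a job.
        deps = {j: ds - next_stage for j, ds in deps.items()
                if ds - next_stage}
    return stages


# Hypothetical DAG: zlib has no deps; cmake needs zlib; hdf5 needs both.
print(stage_jobs({'cmake': {'zlib'}, 'hdf5': {'zlib', 'cmake'}}))
# -> [['zlib'], ['cmake'], ['hdf5']]
```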
@@ -310,13 +330,12 @@ def remove_satisfied_deps(deps, satisfied_list):
|
|||||||
return spec_labels, deps, stages
|
return spec_labels, deps, stages
|
||||||
|
|
||||||
|
|
||||||
def print_staging_summary(spec_labels, dependencies, stages):
|
def _print_staging_summary(spec_labels, dependencies, stages):
|
||||||
if not stages:
|
if not stages:
|
||||||
return
|
return
|
||||||
|
|
||||||
tty.msg(' Staging summary:')
|
tty.msg(' Staging summary ([x] means a job needs rebuilding):')
|
||||||
stage_index = 0
|
for stage_index, stage in enumerate(stages):
|
||||||
for stage in stages:
|
|
||||||
tty.msg(' stage {0} ({1} jobs):'.format(stage_index, len(stage)))
|
tty.msg(' stage {0} ({1} jobs):'.format(stage_index, len(stage)))
|
||||||
|
|
||||||
for job in sorted(stage):
|
for job in sorted(stage):
|
||||||
@@ -324,12 +343,10 @@ def print_staging_summary(spec_labels, dependencies, stages):
|
|||||||
tty.msg(' [{1}] {0} -> {2}'.format(
|
tty.msg(' [{1}] {0} -> {2}'.format(
|
||||||
job,
|
job,
|
||||||
'x' if spec_labels[job]['needs_rebuild'] else ' ',
|
'x' if spec_labels[job]['needs_rebuild'] else ' ',
|
||||||
get_spec_string(s)))
|
_get_spec_string(s)))
|
||||||
|
|
||||||
stage_index += 1
|
|
||||||
|
|
||||||
|
|
||||||
def compute_spec_deps(spec_list, check_index_only=False):
|
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
|
||||||
"""
|
"""
|
||||||
Computes all the dependencies for the spec(s) and generates a JSON
|
Computes all the dependencies for the spec(s) and generates a JSON
|
||||||
object which provides both a list of unique spec names as well as a
|
object which provides both a list of unique spec names as well as a
|
||||||
@@ -402,17 +419,17 @@ def append_dep(s, d):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||||
spec=s, full_hash_match=True, index_only=check_index_only)
|
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
|
||||||
|
|
||||||
skey = spec_deps_key(s)
|
skey = _spec_deps_key(s)
|
||||||
spec_labels[skey] = {
|
spec_labels[skey] = {
|
||||||
'spec': get_spec_string(s),
|
'spec': _get_spec_string(s),
|
||||||
'root': root_spec,
|
'root': root_spec,
|
||||||
'needs_rebuild': not up_to_date_mirrors,
|
'needs_rebuild': not up_to_date_mirrors,
|
||||||
}
|
}
|
||||||
|
|
||||||
for d in s.dependencies(deptype=all):
|
for d in s.dependencies(deptype=all):
|
||||||
dkey = spec_deps_key(d)
|
dkey = _spec_deps_key(d)
|
||||||
if d.external:
|
if d.external:
|
||||||
tty.msg('Will not stage external dep: {0}'.format(d))
|
tty.msg('Will not stage external dep: {0}'.format(d))
|
||||||
continue
|
continue
|
||||||
@@ -435,11 +452,11 @@ def append_dep(s, d):
     return deps_json_obj


-def spec_matches(spec, match_string):
+def _spec_matches(spec, match_string):
     return spec.satisfies(match_string)


-def copy_attributes(attrs_list, src_dict, dest_dict):
+def _copy_attributes(attrs_list, src_dict, dest_dict):
     for runner_attr in attrs_list:
         if runner_attr in src_dict:
             if runner_attr in dest_dict and runner_attr == 'tags':
@@ -460,7 +477,7 @@ def copy_attributes(attrs_list, src_dict, dest_dict):
             dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])


-def find_matching_config(spec, gitlab_ci):
+def _find_matching_config(spec, gitlab_ci):
     runner_attributes = {}
     overridable_attrs = [
         'image',
@@ -471,16 +488,16 @@ def find_matching_config(spec, gitlab_ci):
         'after_script',
     ]

-    copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
+    _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)

     ci_mappings = gitlab_ci['mappings']
     for ci_mapping in ci_mappings:
         for match_string in ci_mapping['match']:
-            if spec_matches(spec, match_string):
+            if _spec_matches(spec, match_string):
                 if 'runner-attributes' in ci_mapping:
-                    copy_attributes(overridable_attrs,
+                    _copy_attributes(overridable_attrs,
                                     ci_mapping['runner-attributes'],
                                     runner_attributes)
                 return runner_attributes
     else:
         return None
@@ -488,16 +505,16 @@ def find_matching_config(spec, gitlab_ci):
     return runner_attributes


-def pkg_name_from_spec_label(spec_label):
+def _pkg_name_from_spec_label(spec_label):
     return spec_label[:spec_label.index('/')]


-def format_job_needs(phase_name, strip_compilers, dep_jobs,
+def _format_job_needs(phase_name, strip_compilers, dep_jobs,
                      osname, build_group, prune_dag, stage_spec_dict,
                      enable_artifacts_buildcache):
     needs_list = []
     for dep_job in dep_jobs:
-        dep_spec_key = spec_deps_key(dep_job)
+        dep_spec_key = _spec_deps_key(dep_job)
         dep_spec_info = stage_spec_dict[dep_spec_key]

         if not prune_dag or dep_spec_info['needs_rebuild']:
@@ -591,7 +608,38 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
 def generate_gitlab_ci_yaml(env, print_summary, output_file,
                             prune_dag=False, check_index_only=False,
                             run_optimizer=False, use_dependencies=False,
-                            artifacts_root=None):
+                            artifacts_root=None, remote_mirror_override=None):
+    """ Generate a gitlab yaml file to run a dynamic child pipeline from
+        the spec matrix in the active environment.
+
+    Arguments:
+        env (spack.environment.Environment): Activated environment object
+            which must contain a gitlab-ci section describing how to map
+            specs to runners
+        print_summary (bool): Should we print a summary of all the jobs in
+            the stages in which they were placed.
+        output_file (str): File path where generated file should be written
+        prune_dag (bool): If True, do not generate jobs for specs that have
+            already been built on the mirror.
+        check_index_only (bool): If True, attempt to fetch the mirror index
+            and only use that to determine whether built specs on the mirror
+            need to be rebuilt (this mode results in faster yaml generation
+            time).  Otherwise, also check each spec directly by url (useful
+            if there is no index or it might be out of date).
+        run_optimizer (bool): If True, post-process the generated yaml to
+            try to reduce the size (attempts to collect repeated configuration
+            and replace it with definitions).
+        use_dependencies (bool): If true, use "dependencies" rather than "needs"
+            ("needs" allows DAG scheduling).  Useful if gitlab instance cannot
+            be configured to handle more than a few "needs" per job.
+        artifacts_root (str): Path where artifacts like logs, environment
+            files (spack.yaml, spack.lock), etc should be written.  GitLab
+            requires this to be within the project directory.
+        remote_mirror_override (str): Typically only needed when one spack.yaml
+            is used to populate several mirrors with binaries, based on some
+            criteria.  Spack protected pipelines populate different mirrors based
+            on branch name, facilitated by this option.
+    """
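For orientation, a driver along these lines is how the function above is typically reached. This is a minimal sketch, not part of the diff: the environment path, output file, and override url are assumptions, and in practice the `spack ci generate` command wires its CLI flags through to these parameters.

    import spack.environment as ev
    import spack.ci as spack_ci

    env = ev.Environment('.')  # directory containing spack.yaml
    ev.activate(env)
    spack_ci.generate_gitlab_ci_yaml(
        env, print_summary=True, output_file='.gitlab-ci.yml',
        prune_dag=True, check_index_only=True,
        artifacts_root='jobs_scratch_dir',
        remote_mirror_override='s3://example-protected-mirror')  # hypothetical url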
     with spack.concretize.disable_compiler_existence_check():
         with env.write_transaction():
             env.concretize()
@@ -640,17 +688,19 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         for s in affected_specs:
             tty.debug('  {0}'.format(s.name))

-    generate_job_name = os.environ.get('CI_JOB_NAME', None)
-    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
+    # Downstream jobs will "need" (depend on, for both scheduling and
+    # artifacts, which include spack.lock file) this pipeline generation
+    # job by both name and pipeline id.  If those environment variables
+    # do not exist, then maybe this is just running in a shell, in which
+    # case, there is no expectation gitlab will ever run the generated
+    # pipeline and those environment variables do not matter.
+    generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
+    parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')

+    # Values: "spack_pull_request", "spack_protected_branch", or not set
     spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
-    is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

-    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
-    pr_mirror_url = None
-    if spack_pr_branch:
-        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
-                                      spack_pr_branch)
+    spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)

     if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
         tty.die('spack ci generate requires an env containing a mirror')
@@ -705,14 +755,29 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             'strip-compilers': False,
         })

-    # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
-    # be up to date in one of those and thus not need to be rebuilt.
-    if pr_mirror_url:
+    # If a remote mirror override (alternate buildcache destination) was
+    # specified, add it here in case it has already built hashes we might
+    # generate.
+    mirrors_to_check = None
+    if remote_mirror_override:
+        if spack_pipeline_type == 'spack_protected_branch':
+            # Overriding the main mirror in this case might result
+            # in skipping jobs on a release pipeline because specs are
+            # up to date in develop.  Eventually we want to notice and take
+            # advantage of this by scheduling a job to copy the spec from
+            # develop to the release, but until we have that, this makes
+            # sure we schedule a rebuild job if the spec isn't already in
+            # the override mirror.
+            mirrors_to_check = {
+                'override': remote_mirror_override
+            }
+
+        # If we have a remote override and we want to generate the pipeline
+        # using --check-index-only, then the override mirror needs to be added
+        # to the configured mirrors when bindist.update() is run, or else we
+        # won't fetch its index and include it in our local cache.
         spack.mirror.add(
-            'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
-        spack.mirror.add('ci_shared_pr_mirror',
-                         SPACK_SHARED_PR_MIRROR_URL,
-                         cfg.default_modify_scope())
+            'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())

     pipeline_artifacts_dir = artifacts_root
     if not pipeline_artifacts_dir:
@@ -758,7 +823,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             user_artifacts_dir, ci_project_dir)

     # Speed up staging by first fetching binary indices from all mirrors
-    # (including the per-PR mirror we may have just added above).
+    # (including the override mirror we may have just added above).
     try:
         bindist.binary_index.update()
     except bindist.FetchCacheError as e:
@@ -787,10 +852,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                     phase_spec.concretize()
             staged_phases[phase_name] = stage_spec_jobs(
                 concrete_phase_specs,
-                check_index_only=check_index_only)
+                check_index_only=check_index_only,
+                mirrors_to_check=mirrors_to_check)
     finally:
-        # Clean up PR mirror if enabled
-        if pr_mirror_url:
+        # Clean up remote mirror override if enabled
+        if remote_mirror_override:
             spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

     all_job_names = []
@@ -804,7 +870,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     max_needs_job = ''

     # If this is configured, spack will fail "spack ci generate" if it
-    # generates any full hash which exists under the broken specs url.
+    # generates any hash which exists under the broken specs url.
     broken_spec_urls = None
     if broken_specs_url:
         if broken_specs_url.startswith('http'):
@@ -819,7 +885,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         phase_name = phase['name']
         strip_compilers = phase['strip-compilers']

-        main_phase = is_main_phase(phase_name)
+        main_phase = _is_main_phase(phase_name)
         spec_labels, dependencies, stages = staged_phases[phase_name]

         for stage_jobs in stages:
@@ -830,11 +896,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             for spec_label in stage_jobs:
                 spec_record = spec_labels[spec_label]
                 root_spec = spec_record['rootSpec']
-                pkg_name = pkg_name_from_spec_label(spec_label)
+                pkg_name = _pkg_name_from_spec_label(spec_label)
                 release_spec = root_spec[pkg_name]
-                release_spec_full_hash = release_spec.full_hash()
                 release_spec_dag_hash = release_spec.dag_hash()
-                release_spec_build_hash = release_spec.build_hash()

                 if prune_untouched_packages:
                     if release_spec not in affected_specs:
@@ -843,7 +907,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                         spec_record['needs_rebuild'] = False
                         continue

-                runner_attribs = find_matching_config(
+                runner_attribs = _find_matching_config(
                     release_spec, gitlab_ci)

                 if not runner_attribs:
@@ -853,6 +917,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

                 tags = [tag for tag in runner_attribs['tags']]

+                if spack_pipeline_type is not None:
+                    # For spack pipelines "public" and "protected" are reserved tags
+                    tags = _remove_reserved_tags(tags)
+                    if spack_pipeline_type == 'spack_protected_branch':
+                        tags.extend(['aws', 'protected'])
+                    elif spack_pipeline_type == 'spack_pull_request':
+                        tags.extend(['public'])
+
                 variables = {}
                 if 'variables' in runner_attribs:
                     variables.update(runner_attribs['variables'])
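`_remove_reserved_tags` is referenced in this hunk but defined outside it; a minimal sketch consistent with how it is used here (the exact tag list is an assumption, inferred from the values re-added above):

    SPACK_RESERVED_TAGS = ['public', 'protected', 'notary']  # assumed

    def _remove_reserved_tags(tags):
        """Return a copy of tags with any spack-reserved tags removed."""
        return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]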
@@ -897,15 +969,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 compiler_action = 'NONE'
                 if len(phases) > 1:
                     compiler_action = 'FIND_ANY'
-                    if is_main_phase(phase_name):
+                    if _is_main_phase(phase_name):
                         compiler_action = 'INSTALL_MISSING'

                 job_vars = {
-                    'SPACK_ROOT_SPEC': format_root_spec(
+                    'SPACK_ROOT_SPEC': _format_root_spec(
                         root_spec, main_phase, strip_compilers),
                     'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
-                    'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
-                    'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
                     'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
                     'SPACK_COMPILER_ACTION': compiler_action
                 }
@@ -924,15 +994,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                     # purposes, so we only get the direct dependencies.
                     dep_jobs = []
                     for dep_label in dependencies[spec_label]:
-                        dep_pkg = pkg_name_from_spec_label(dep_label)
+                        dep_pkg = _pkg_name_from_spec_label(dep_label)
                         dep_root = spec_labels[dep_label]['rootSpec']
                         dep_jobs.append(dep_root[dep_pkg])

                     job_dependencies.extend(
-                        format_job_needs(phase_name, strip_compilers,
+                        _format_job_needs(phase_name, strip_compilers,
                                          dep_jobs, osname, build_group,
                                          prune_dag, spec_labels,
                                          enable_artifacts_buildcache))

                 rebuild_spec = spec_record['needs_rebuild']

@@ -943,7 +1013,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 # compiler we are supposed to use is listed in any of the
                 # bootstrap spec lists, then we will add more dependencies to
                 # the job (that compiler and maybe its dependencies as well).
-                if is_main_phase(phase_name):
+                if _is_main_phase(phase_name):
                     spec_arch_family = (release_spec.architecture
                                         .target
                                         .microarchitecture
@@ -971,7 +1041,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                             # be rebuilt if the compiler targeted to build it
                             # needs to be rebuilt.
                             bs_specs, _, _ = staged_phases[bs['phase-name']]
-                            c_spec_key = spec_deps_key(c_spec)
+                            c_spec_key = _spec_deps_key(c_spec)
                             rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
                             rebuild_spec = rebuild_spec or rbld_comp
                             # Also update record so dependents do not fail to
@@ -985,14 +1055,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                             ]

                             job_dependencies.extend(
-                                format_job_needs(bs['phase-name'],
+                                _format_job_needs(bs['phase-name'],
                                                  bs['strip-compilers'],
                                                  dep_jobs,
                                                  str(bs_arch),
                                                  build_group,
                                                  prune_dag,
                                                  bs_specs,
                                                  enable_artifacts_buildcache))
                         else:
                             debug_msg = ''.join([
                                 'Considered compiler {0} for spec ',
@@ -1009,9 +1079,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                     continue

                 if (broken_spec_urls is not None and
-                        release_spec_full_hash in broken_spec_urls):
+                        release_spec_dag_hash in broken_spec_urls):
                     known_broken_specs_encountered.append('{0} ({1})'.format(
-                        release_spec, release_spec_full_hash))
+                        release_spec, release_spec_dag_hash))

                 if artifacts_root:
                     job_dependencies.append({
@@ -1022,7 +1092,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)

                 if enable_cdash_reporting:
-                    cdash_build_name = get_cdash_build_name(
+                    cdash_build_name = _get_cdash_build_name(
                         release_spec, build_group)
                     all_job_names.append(cdash_build_name)
                     job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
@@ -1087,7 +1157,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             phase_name = phase['name']
             tty.msg('Stages for phase "{0}"'.format(phase_name))
             phase_stages = staged_phases[phase_name]
-            print_staging_summary(*phase_stages)
+            _print_staging_summary(*phase_stages)

     tty.debug('{0} build jobs generated in {1} stages'.format(
         job_id, stage_id))
@@ -1099,8 +1169,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     # Use "all_job_names" to populate the build group for this set
     if enable_cdash_reporting and cdash_auth_token:
         try:
-            populate_buildgroup(all_job_names, build_group, cdash_project,
+            _populate_buildgroup(all_job_names, build_group, cdash_project,
                                 cdash_site, cdash_auth_token, cdash_url)
         except (SpackError, HTTPError, URLError) as err:
             tty.warn('Problem populating buildgroup: {0}'.format(err))
     else:
@@ -1136,9 +1206,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         cleanup_job = {}

         if service_job_config:
-            copy_attributes(default_attrs,
+            _copy_attributes(default_attrs,
                             service_job_config,
                             cleanup_job)

+        if 'tags' in cleanup_job:
+            service_tags = _remove_reserved_tags(cleanup_job['tags'])
+            cleanup_job['tags'] = service_tags
+
         cleanup_job['stage'] = 'cleanup-temp-storage'
         cleanup_job['script'] = [
@@ -1147,22 +1221,91 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         ]
         cleanup_job['when'] = 'always'
         cleanup_job['retry'] = service_job_retries
+        cleanup_job['interruptible'] = True

         output_object['cleanup'] = cleanup_job

+    if ('signing-job-attributes' in gitlab_ci and
+            spack_pipeline_type == 'spack_protected_branch'):
+        # External signing: generate a job to check and sign binary pkgs
+        stage_names.append('stage-sign-pkgs')
+        signing_job_config = gitlab_ci['signing-job-attributes']
+        signing_job = {}
+
+        signing_job_attrs_to_copy = [
+            'image',
+            'tags',
+            'variables',
+            'before_script',
+            'script',
+            'after_script',
+        ]
+
+        _copy_attributes(signing_job_attrs_to_copy,
+                         signing_job_config,
+                         signing_job)
+
+        signing_job_tags = []
+        if 'tags' in signing_job:
+            signing_job_tags = _remove_reserved_tags(signing_job['tags'])
+
+        for tag in ['aws', 'protected', 'notary']:
+            if tag not in signing_job_tags:
+                signing_job_tags.append(tag)
+        signing_job['tags'] = signing_job_tags
+
+        signing_job['stage'] = 'stage-sign-pkgs'
+        signing_job['when'] = 'always'
+        signing_job['retry'] = {
+            'max': 2,
+            'when': ['always']
+        }
+        signing_job['interruptible'] = True
+
+        output_object['sign-pkgs'] = signing_job
+
+    if spack_buildcache_copy:
+        # Generate a job to copy the contents from wherever the builds are
+        # getting pushed to the url specified in the "SPACK_COPY_BUILDCACHE"
+        # environment variable.
+        src_url = remote_mirror_override or remote_mirror_url
+        dest_url = spack_buildcache_copy
+
+        stage_names.append('stage-copy-buildcache')
+        copy_job = {
+            'stage': 'stage-copy-buildcache',
+            'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
+            'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
+            'when': 'on_success',
+            'interruptible': True,
+            'retry': service_job_retries,
+            'script': [
+                '. ./share/spack/setup-env.sh',
+                'spack --version',
+                'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
+                    src_url, dest_url)
+            ]
+        }
+
+        output_object['copy-mirror'] = copy_job
+
     if rebuild_index_enabled:
         # Add a final job to regenerate the index
         stage_names.append('stage-rebuild-index')
         final_job = {}

         if service_job_config:
-            copy_attributes(default_attrs,
+            _copy_attributes(default_attrs,
                             service_job_config,
                             final_job)

+        if 'tags' in final_job:
+            service_tags = _remove_reserved_tags(final_job['tags'])
+            final_job['tags'] = service_tags
+
         index_target_mirror = mirror_urls[0]
-        if is_pr_pipeline:
-            index_target_mirror = pr_mirror_url
+        if remote_mirror_override:
+            index_target_mirror = remote_mirror_override

         final_job['stage'] = 'stage-rebuild-index'
         final_job['script'] = [
@@ -1171,6 +1314,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         ]
         final_job['when'] = 'always'
         final_job['retry'] = service_job_retries
+        final_job['interruptible'] = True

         output_object['rebuild-index'] = final_job

@@ -1203,8 +1347,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
     }

-    if pr_mirror_url:
-        output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
+    if remote_mirror_override:
+        (output_object['variables']
+            ['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override

     spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
     if spack_stack_name:
@@ -1229,9 +1374,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         noop_job = {}

         if service_job_config:
-            copy_attributes(default_attrs,
+            _copy_attributes(default_attrs,
                             service_job_config,
                             noop_job)

         if 'script' not in noop_job:
             noop_job['script'] = [
@@ -1254,7 +1399,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         outf.write(syaml.dump_config(sorted_output, default_flow_style=True))


-def url_encode_string(input_string):
+def _url_encode_string(input_string):
     encoded_keyval = urlencode({'donotcare': input_string})
     eq_idx = encoded_keyval.find('=') + 1
     encoded_value = encoded_keyval[eq_idx:]
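The trick `_url_encode_string` relies on can be seen in isolation. A standalone sketch using the stdlib (the module itself gets `urlencode` from its own imports; the spec string is a placeholder):

    from urllib.parse import urlencode

    encoded_keyval = urlencode({'donotcare': 'gcc@9.3.0 cflags=-O3'})
    eq_idx = encoded_keyval.find('=') + 1
    print(encoded_keyval[eq_idx:])  # everything after '=' is the encoded value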
@@ -1262,6 +1407,17 @@ def url_encode_string(input_string):


 def import_signing_key(base64_signing_key):
+    """ Given Base64-encoded gpg key, decode and import it to use for
+        signing packages.
+
+    Arguments:
+        base64_signing_key (str): A gpg key including the secret key,
+            armor-exported and base64 encoded, so it can be stored in a
+            gitlab CI variable.  For an example of how to generate such
+            a key, see:
+
+    https://github.com/spack/spack-infrastructure/blob/main/gitlab-docker/files/gen-key
+    """
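For context, the CI variable this function consumes could be prepared like so. A hedged sketch; the key file name and the CI variable name are assumptions:

    import base64

    # beforehand: gpg --armor --export-secret-keys <key-id> > signing_key.asc
    with open('signing_key.asc', 'rb') as f:
        b64_key = base64.b64encode(f.read()).decode('utf-8')
    # Store b64_key in a masked GitLab CI variable, then inside the job call
    # import_signing_key(b64_key) before pushing signed binaries.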
     if not base64_signing_key:
         tty.warn('No key found for signing/verifying packages')
         return
@@ -1299,14 +1455,34 @@ def import_signing_key(base64_signing_key):


 def can_sign_binaries():
+    """ Utility method to determine if this spack instance is capable of
+        signing binary packages.  This is currently only possible if the
+        spack gpg keystore contains exactly one secret key."""
     return len(gpg_util.signing_keys()) == 1


 def can_verify_binaries():
+    """ Utility method to determine if this spack instance is capable (at
+        least in theory) of verifying signed binaries."""
     return len(gpg_util.public_keys()) >= 1


 def configure_compilers(compiler_action, scope=None):
+    """ Depending on the compiler_action parameter, either turn on the
+        install_missing_compilers config option, or find spack compilers,
+        or do nothing.  This is used from rebuild jobs in bootstrapping
+        pipelines, where in the bootstrapping phase we would pass
+        FIND_ANY in case of compiler-agnostic bootstrapping, while in the
+        spec building phase we would pass INSTALL_MISSING in order to get
+        spack to use the compiler which was built in the previous phase and
+        is now sitting in the binary mirror.
+
+    Arguments:
+        compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
+            described above.  Any other value essentially results in a no-op.
+        scope (spack.config.ConfigScope): Optional.  The scope in which to look
+            for compilers, in case 'FIND_ANY' was provided.
+    """
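A rebuild job would typically drive this with the action that the generation step wrote into its job variables (see SPACK_COMPILER_ACTION in job_vars above). A sketch; reading the variable directly like this is an assumption about the surrounding job script:

    import os

    compiler_action = os.environ.get('SPACK_COMPILER_ACTION', 'NONE')
    configure_compilers(compiler_action)  # no-op unless FIND_ANY/INSTALL_MISSING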
     if compiler_action == 'INSTALL_MISSING':
         tty.debug('Make sure bootstrapped compiler will be installed')
         config = cfg.get('config')
@@ -1330,6 +1506,35 @@ def configure_compilers(compiler_action, scope=None):


 def get_concrete_specs(env, root_spec, job_name, compiler_action):
+    """ Build a dictionary of concrete specs relevant to a particular
+        rebuild job.  This includes the root spec and the spec to be
+        rebuilt (which could be the same).
+
+    Arguments:
+
+        env (spack.environment.Environment): Activated spack environment
+            used to get concrete root spec by hash in case compiler_action
+            is anything other than FIND_ANY.
+        root_spec (str): If compiler_action is FIND_ANY root_spec is
+            a string representation which can be turned directly into
+            a spec, otherwise, it's a hash used to index the activated
+            spack environment.
+        job_name (str): Name of package to be built, used to index the
+            concrete root spec and produce the concrete spec to be
+            built.
+        compiler_action (str): Determines how to interpret the root_spec
+            parameter, either as a string representation or as a hash.
+
+    Returns:
+
+    .. code-block:: JSON
+
+       {
+           "root": "<spec>",
+           "<job-pkg-name>": "<spec>",
+       }
+
+    """
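An illustrative call matching the documented return shape ('readline' stands in for a hypothetical job_name):

    spec_map = get_concrete_specs(env, root_spec, 'readline', compiler_action)
    root = spec_map['root']          # the concrete root spec
    job_spec = spec_map['readline']  # concrete spec for the package to rebuild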
     spec_map = {
         'root': None,
     }
@@ -1376,6 +1581,19 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):


 def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
+    """ Push one or more binary packages to the mirror.
+
+    Arguments:
+
+        env (spack.environment.Environment): Optional environment.  If
+            provided, it is used to make sure binary package to push
+            exists in the environment.
+        specfile_path (str): Path to spec.json corresponding to built pkg
+            to push.
+        mirror_url (str): Base url of target mirror
+        sign_binaries (bool): If True, spack will attempt to sign binary
+            package before pushing.
+    """
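A hedged usage sketch; the spec.json path and the mirror url are placeholders:

    sign = can_sign_binaries()  # defined above: exactly one secret key present
    push_mirror_contents(env, '/tmp/concrete_spec.json',
                         's3://my-ci-mirror', sign)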
     try:
         _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
     except Exception as inst:
@@ -1400,9 +1618,19 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):


 def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
+    """ Looks for spack-build-out.txt in the stage directory of the given
+        job_spec, and attempts to copy the file into the directory given
+        by job_log_dir.
+
+    Arguments:
+
+        job_spec (spack.spec.Spec): Spec associated with spack install log
+        job_log_dir (str): Path into which build log should be copied
+    """
     try:
-        job_pkg = spack.repo.get(job_spec)
-        tty.debug('job package: {0}'.format(job_pkg))
+        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
+        job_pkg = pkg_cls(job_spec)
+        tty.debug('job package: {0.fullname}'.format(job_pkg))
         stage_dir = job_pkg.stage.path
         tty.debug('stage dir: {0}'.format(stage_dir))
         build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
@@ -1418,6 +1646,14 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):


 def download_and_extract_artifacts(url, work_dir):
+    """ Look for gitlab artifacts.zip at the given url, and attempt to download
+        and extract the contents into the given work_dir
+
+    Arguments:
+
+        url (str): Complete url to artifacts.zip file
+        work_dir (str): Path to destination where artifacts should be extracted
+    """
     tty.msg('Fetching artifacts from: {0}\n'.format(url))

     headers = {
@@ -1457,6 +1693,8 @@ def download_and_extract_artifacts(url, work_dir):


 def get_spack_info():
+    """ If spack is running from a git repo, return the most recent git log
+        entry, otherwise, return a string containing the spack version. """
     git_path = os.path.join(spack.paths.prefix, ".git")
     if os.path.exists(git_path):
         git = exe.which("git")
@@ -1472,6 +1710,23 @@ def get_spack_info():


 def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
+    """ Look in the local spack clone to find the checkout_commit, and if
+        provided, the merge_commit given as arguments.  If those commits can
+        be found locally, then clone spack and attempt to recreate a merge
+        commit with the same parent commits as tested in gitlab.  This looks
+        something like 1) git clone repo && cd repo 2) git checkout
+        <checkout_commit> 3) git merge <merge_commit>.  If there is no
+        merge_commit provided, then skip step (3).
+
+    Arguments:
+
+        repro_dir (str): Location where spack should be cloned
+        checkout_commit (str): SHA of PR branch commit
+        merge_commit (str): SHA of target branch parent
+
+    Returns: True if git repo state was successfully recreated, or False
+        otherwise.
+    """
     # figure out the path to the spack git version being used for the
     # reproduction
     print('checkout_commit: {0}'.format(checkout_commit))
@@ -1513,7 +1768,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
                                fail_on_error=False)

     if git.returncode != 0:
-        tty.error('Unable to clone your local spac repo:')
+        tty.error('Unable to clone your local spack repo:')
         tty.msg(clone_out)
         return False

@@ -1546,6 +1801,18 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):


 def reproduce_ci_job(url, work_dir):
+    """ Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
+        attempt to setup an environment in which the failure can be reproduced
+        locally.  This entails the following:
+
+        First download and extract artifacts.  Then look through those artifacts
+        to glean some information needed for the reproducer (e.g. one of the
+        artifacts contains information about the version of spack tested by
+        gitlab, another is the generated pipeline yaml containing details
+        of the job like the docker image used to run it).  The output of this
+        function is a set of printed instructions for running docker and then
+        commands to run to reproduce the build once inside the container.
+    """
     download_and_extract_artifacts(url, work_dir)

     lock_file = fs.find(work_dir, 'spack.lock')[0]
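A hedged example of invoking the reproducer; the artifacts url and work dir are placeholders:

    reproduce_ci_job(
        'https://gitlab.example.com/api/v4/projects/42/jobs/1234/artifacts',
        '/tmp/repro')
    # Prints docker instructions plus the commands to run inside the container.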
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -8,7 +8,10 @@
 import argparse
 import os
 import re
+import shlex
 import sys
+from textwrap import dedent
+from typing import List, Tuple

 import ruamel.yaml as yaml
 import six
@@ -147,6 +150,58 @@ def get_command(cmd_name):
     return getattr(get_module(cmd_name), pname)


+class _UnquotedFlags(object):
+    """Use a heuristic in `.extract()` to detect whether the user is trying to set
+    multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
+
+    If the heuristic finds a match (which can be checked with `__bool__()`), a warning
+    message explaining how to quote multiple flags correctly can be generated with
+    `.report()`.
+    """
+
+    flags_arg_pattern = re.compile(
+        r'^({0})=([^\'"].*)$'.format(
+            '|'.join(spack.spec.FlagMap.valid_compiler_flags()),
+        ))
+
+    def __init__(self, all_unquoted_flag_pairs):
+        # type: (List[Tuple[re.Match, str]]) -> None
+        self._flag_pairs = all_unquoted_flag_pairs
+
+    def __bool__(self):
+        # type: () -> bool
+        return bool(self._flag_pairs)
+
+    @classmethod
+    def extract(cls, sargs):
+        # type: (str) -> _UnquotedFlags
+        all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
+        prev_flags_arg = None
+        for arg in shlex.split(sargs):
+            if prev_flags_arg is not None:
+                all_unquoted_flag_pairs.append((prev_flags_arg, arg))
+            prev_flags_arg = cls.flags_arg_pattern.match(arg)
+        return cls(all_unquoted_flag_pairs)
+
+    def report(self):
+        # type: () -> str
+        single_errors = [
+            '({0}) {1} {2} => {3}'.format(
+                i + 1, match.group(0), next_arg,
+                '{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
+            )
+            for i, (match, next_arg) in enumerate(self._flag_pairs)
+        ]
+        return dedent("""\
+        Some compiler or linker flags were provided without quoting their arguments,
+        which now causes spack to try to parse the *next* argument as a spec component
+        such as a variant instead of an additional compiler or linker flag. If the
+        intent was to set multiple flags, try quoting them together as described below.
+
+        Possible flag quotation errors (with the correctly-quoted version after the =>):
+        {0}""").format('\n'.join(single_errors))
+
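How the heuristic behaves, sketched as a short session (this assumes spack's modules are importable so `valid_compiler_flags()` resolves; the spec string is hypothetical):

    flags = _UnquotedFlags.extract('install zlib cflags=-Os -pipe')
    assert bool(flags)   # 'cflags=-Os' matched, '-pipe' was the next argument
    print(flags.report())
    # Suggests the correctly-quoted form: cflags="-Os -pipe"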

 def parse_specs(args, **kwargs):
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -155,29 +210,28 @@ def parse_specs(args, **kwargs):
     normalize = kwargs.get('normalize', False)
     tests = kwargs.get('tests', False)

+    sargs = args
+    if not isinstance(args, six.string_types):
+        sargs = ' '.join(args)
+    unquoted_flags = _UnquotedFlags.extract(sargs)
+
     try:
-        sargs = args
-        if not isinstance(args, six.string_types):
-            sargs = ' '.join(spack.util.string.quote(args))
         specs = spack.spec.parse(sargs)
         for spec in specs:
             if concretize:
                 spec.concretize(tests=tests)  # implies normalize
             elif normalize:
                 spec.normalize(tests=tests)

         return specs

-    except spack.spec.SpecParseError as e:
-        msg = e.message + "\n" + str(e.string) + "\n"
-        msg += (e.pos + 2) * " " + "^"
-        raise spack.error.SpackError(msg)
-
     except spack.error.SpecError as e:

         msg = e.message
         if e.long_message:
             msg += e.long_message
+        if unquoted_flags:
+            msg += '\n\n'
+            msg += unquoted_flags.report()

         raise spack.error.SpackError(msg)

--- a/lib/spack/spack/cmd/analyze.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import sys
-
-import llnl.util.tty as tty
-
-import spack.analyzers
-import spack.build_environment
-import spack.cmd
-import spack.cmd.common.arguments as arguments
-import spack.environment as ev
-import spack.fetch_strategy
-import spack.monitor
-import spack.paths
-import spack.report
-
-description = "run analyzers on installed packages"
-section = "analysis"
-level = "long"
-
-
-def setup_parser(subparser):
-    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='analyze_command')
-
-    sp.add_parser('list-analyzers',
-                  description="list available analyzers",
-                  help="show list of analyzers that are available to run.")
-
-    # This adds the monitor group to the subparser
-    spack.monitor.get_monitor_group(subparser)
-
-    # Run Parser
-    run_parser = sp.add_parser('run', description="run an analyzer",
-                               help="provide the name of the analyzer to run.")
-
-    run_parser.add_argument(
-        '--overwrite', action='store_true',
-        help="re-analyze even if the output file already exists.")
-    run_parser.add_argument(
-        '-p', '--path', default=None,
-        dest='path',
-        help="write output to a different directory than ~/.spack/analyzers")
-    run_parser.add_argument(
-        '-a', '--analyzers', default=None,
-        dest="analyzers", action="append",
-        help="add an analyzer (defaults to all available)")
-    arguments.add_common_arguments(run_parser, ['spec'])
-
-
-def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=False):
-    """
-    Do an analysis for a spec, optionally adding monitoring.
-
-    We also allow the user to specify a custom output directory.
-    analyze_spec(spec, args.analyzers, args.outdir, monitor)
-
-    Args:
-        spec (spack.spec.Spec): spec object of installed package
-        analyzers (list): list of analyzer (keys) to run
-        monitor (spack.monitor.SpackMonitorClient): a monitor client
-        overwrite (bool): overwrite result if already exists
-    """
-    analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())
-
-    # Load the build environment from the spec install directory, and send
-    # the spec to the monitor if it's not known
-    if monitor:
-        monitor.load_build_environment(spec)
-        monitor.new_configuration([spec])
-
-    for name in analyzers:
-
-        # Instantiate the analyzer with the spec and outdir
-        analyzer = spack.analyzers.get_analyzer(name)(spec, outdir)
-
-        # Run the analyzer to get a json result - results are returned as
-        # a dictionary with a key corresponding to the analyzer type, so
-        # we can just update the data
-        result = analyzer.run()
-
-        # Send the result. We do them separately because:
-        # 1. each analyzer might have differently organized output
-        # 2. the size of a result can be large
-        analyzer.save_result(result, overwrite)
-
-
-def analyze(parser, args, **kwargs):
-
-    # If the user wants to list analyzers, do so and exit
-    if args.analyze_command == "list-analyzers":
-        spack.analyzers.list_all()
-        sys.exit(0)
-
-    # handle active environment, if any
-    env = ev.active_environment()
-
-    # Get and disambiguate spec (we should only have one)
-    specs = spack.cmd.parse_specs(args.spec)
-    if not specs:
-        tty.die("You must provide one or more specs to analyze.")
-    spec = spack.cmd.disambiguate_spec(specs[0], env)
-
-    # The user wants to monitor builds using github.com/spack/spack-monitor
-    # It is instantiated once here, and then available at spack.monitor.cli
-    monitor = None
-    if args.use_monitor:
-        monitor = spack.monitor.get_client(
-            host=args.monitor_host,
-            prefix=args.monitor_prefix,
-        )
-
-    # Run the analysis
-    analyze_spec(spec, args.analyzers, args.path, monitor, args.overwrite)
--- a/lib/spack/spack/cmd/blame.py
+++ b/lib/spack/spack/cmd/blame.py
@@ -99,8 +99,8 @@ def blame(parser, args):
         blame_file = path

     if not blame_file:
-        pkg = spack.repo.get(args.package_or_file)
-        blame_file = pkg.module.__file__.rstrip('c')  # .pyc -> .py
+        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
+        blame_file = pkg_cls.module.__file__.rstrip('c')  # .pyc -> .py

     # get git blame for the package
     with working_dir(spack.paths.prefix):
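The `spack.repo.get(...)` to `get_pkg_class(...)` migration recurs across this compare (see also copy_stage_logs_to_artifacts above). The new pattern in isolation, as a hedged sketch with a hypothetical package name:

    import spack.repo

    pkg_cls = spack.repo.path.get_pkg_class('zlib')   # class lookup, no spec needed
    blame_file = pkg_cls.module.__file__.rstrip('c')  # package.py path (.pyc -> .py)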
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -6,7 +6,9 @@

 import os.path
 import shutil
+import tempfile

+import llnl.util.filesystem
 import llnl.util.tty
 import llnl.util.tty.color

@@ -15,6 +17,9 @@
 import spack.cmd.common.arguments
 import spack.config
 import spack.main
+import spack.mirror
+import spack.spec
+import spack.stage
 import spack.util.path

 description = "manage bootstrap configuration"
@@ -22,6 +27,38 @@
 level = "long"


+# Tarball to be downloaded if binary packages are requested in a local mirror
+BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'
+
+#: Subdirectory where to create the mirror
+LOCAL_MIRROR_DIR = 'bootstrap_cache'
+
+# Metadata for a generated binary mirror
+BINARY_METADATA = {
+    'type': 'buildcache',
+    'description': ('Buildcache copied from a public tarball available on Github.'
+                    'The sha256 checksum of binaries is checked before installation.'),
+    'info': {
+        'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
+        'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
+        'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
+        'tarball': BINARY_TARBALL
+    }
+}
+
+CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
+GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'
+
+# Metadata for a generated source mirror
+SOURCE_METADATA = {
+    'type': 'install',
+    'description': 'Mirror with software needed to bootstrap Spack',
+    'info': {
+        'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
+    }
+}
+
+
 def _add_scope_option(parser):
     scopes = spack.config.scopes()
     scopes_metavar = spack.config.scopes_metavar
@@ -67,24 +104,61 @@ def setup_parser(subparser):
     )

     list = sp.add_parser(
-        'list', help='list the methods available for bootstrapping'
+        'list', help='list all the sources of software to bootstrap Spack'
     )
     _add_scope_option(list)

     trust = sp.add_parser(
-        'trust', help='trust a bootstrapping method'
+        'trust', help='trust a bootstrapping source'
     )
     _add_scope_option(trust)
     trust.add_argument(
-        'name', help='name of the method to be trusted'
+        'name', help='name of the source to be trusted'
     )

     untrust = sp.add_parser(
-        'untrust', help='untrust a bootstrapping method'
+        'untrust', help='untrust a bootstrapping source'
     )
     _add_scope_option(untrust)
     untrust.add_argument(
-        'name', help='name of the method to be untrusted'
+        'name', help='name of the source to be untrusted'
+    )
+
+    add = sp.add_parser(
+        'add', help='add a new source for bootstrapping'
+    )
+    _add_scope_option(add)
+    add.add_argument(
+        '--trust', action='store_true',
+        help='trust the source immediately upon addition')
+    add.add_argument(
+        'name', help='name of the new source of software'
+    )
+    add.add_argument(
+        'metadata_dir', help='directory where to find metadata files'
+    )
+
+    remove = sp.add_parser(
+        'remove', help='remove a bootstrapping source'
+    )
+    remove.add_argument(
+        'name', help='name of the source to be removed'
+    )
+
+    mirror = sp.add_parser(
+        'mirror', help='create a local mirror to bootstrap Spack'
+    )
+    mirror.add_argument(
+        '--binary-packages', action='store_true',
+        help='download public binaries in the mirror'
+    )
+    mirror.add_argument(
+        '--dev', action='store_true',
+        help='download dev dependencies too'
+    )
+    mirror.add_argument(
+        metavar='DIRECTORY', dest='root_dir',
+        help='root directory in which to create the mirror and metadata'
     )

@@ -137,10 +211,7 @@ def _root(args):


 def _list(args):
-    sources = spack.config.get(
-        'bootstrap:sources', default=None, scope=args.scope
-    )
+    sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)

     if not sources:
         llnl.util.tty.msg(
             "No method available for bootstrapping Spack's dependencies"
@@ -249,6 +320,121 @@ def _status(args):
     print()

+def _add(args):
+    initial_sources = spack.bootstrap.bootstrapping_sources()
+    names = [s['name'] for s in initial_sources]
+
+    # If the name is already used error out
+    if args.name in names:
+        msg = 'a source named "{0}" already exists. Please choose a different name'
+        raise RuntimeError(msg.format(args.name))
+
+    # Check that the metadata file exists
+    metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
+    if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
+        raise RuntimeError(
+            'the directory "{0}" does not exist'.format(args.metadata_dir)
+        )
+
+    file = os.path.join(metadata_dir, 'metadata.yaml')
+    if not os.path.exists(file):
+        raise RuntimeError('the file "{0}" does not exist'.format(file))
+
+    # Insert the new source as the highest priority one
+    write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
+    sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
+    sources = [
+        {'name': args.name, 'metadata': args.metadata_dir}
+    ] + sources
+    spack.config.set('bootstrap:sources', sources, scope=write_scope)
+
+    msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
+    llnl.util.tty.msg(msg.format(args.name, write_scope))
+    if args.trust:
+        _trust(args)
+
+
+def _remove(args):
+    initial_sources = spack.bootstrap.bootstrapping_sources()
+    names = [s['name'] for s in initial_sources]
+    if args.name not in names:
+        msg = ('cannot find any bootstrapping source named "{0}". '
+               'Run `spack bootstrap list` to see available sources.')
+        raise RuntimeError(msg.format(args.name))
+
+    for current_scope in spack.config.scopes():
+        sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
+        if args.name in [s['name'] for s in sources]:
+            sources = [s for s in sources if s['name'] != args.name]
+            spack.config.set('bootstrap:sources', sources, scope=current_scope)
+            msg = ('Removed the bootstrapping source named "{0}" from the '
+                   '"{1}" configuration scope.')
+            llnl.util.tty.msg(msg.format(args.name, current_scope))
+        trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
+        if args.name in trusted:
+            trusted.pop(args.name)
+            spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
+            msg = 'Deleting information on "{0}" from list of trusted sources'
+            llnl.util.tty.msg(msg.format(args.name))
+
+
|
def _mirror(args):
|
||||||
|
mirror_dir = spack.util.path.canonicalize_path(
|
||||||
|
os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: Here we are adding gnuconfig manually, but this can be fixed
|
||||||
|
# TODO: as soon as we have an option to add to a mirror all the possible
|
||||||
|
# TODO: dependencies of a spec
|
||||||
|
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
|
||||||
|
for spec_str in root_specs:
|
||||||
|
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
|
||||||
|
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
||||||
|
# Suppress tty from the call below for terser messages
|
||||||
|
llnl.util.tty.set_msg_enabled(False)
|
||||||
|
spec = spack.spec.Spec(spec_str).concretized()
|
||||||
|
for node in spec.traverse():
|
||||||
|
spack.mirror.create(mirror_dir, [node])
|
||||||
|
llnl.util.tty.set_msg_enabled(True)
|
||||||
|
|
||||||
|
if args.binary_packages:
|
||||||
|
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
|
||||||
|
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
|
||||||
|
llnl.util.tty.set_msg_enabled(False)
|
||||||
|
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
|
||||||
|
stage.create()
|
||||||
|
stage.fetch()
|
||||||
|
stage.expand_archive()
|
||||||
|
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
|
||||||
|
shutil.move(build_cache_dir, mirror_dir)
|
||||||
|
llnl.util.tty.set_msg_enabled(True)
|
||||||
|
|
||||||
|
def write_metadata(subdir, metadata):
|
||||||
|
metadata_rel_dir = os.path.join('metadata', subdir)
|
||||||
|
metadata_yaml = os.path.join(
|
||||||
|
args.root_dir, metadata_rel_dir, 'metadata.yaml'
|
||||||
|
)
|
||||||
|
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
|
||||||
|
with open(metadata_yaml, mode='w') as f:
|
||||||
|
spack.util.spack_yaml.dump(metadata, stream=f)
|
||||||
|
return os.path.dirname(metadata_yaml), metadata_rel_dir
|
||||||
|
|
||||||
|
instructions = ('\nTo register the mirror on the platform where it\'s supposed '
|
||||||
|
'to be used, move "{0}" to its final location and run the '
|
||||||
|
'following command(s):\n\n').format(args.root_dir)
|
||||||
|
cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
|
||||||
|
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
|
||||||
|
instructions += cmd.format('local-sources', rel_directory)
|
||||||
|
if args.binary_packages:
|
||||||
|
abs_directory, rel_directory = write_metadata(
|
||||||
|
subdir='binaries', metadata=BINARY_METADATA
|
||||||
|
)
|
||||||
|
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
|
||||||
|
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
|
||||||
|
instructions += cmd.format('local-binaries', rel_directory)
|
||||||
|
print(instructions)
|
||||||
|
|
||||||
|
|
||||||
def bootstrap(parser, args):
|
def bootstrap(parser, args):
|
||||||
callbacks = {
|
callbacks = {
|
||||||
'status': _status,
|
'status': _status,
|
||||||
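Reviewer sketch of the priority rule `_add` encodes above: new sources are prepended to `bootstrap:sources`, so the most recently added source wins lookups. The snippet below is standalone pseudocode of that rule, not Spack API; `add_source` and the sample entries are made up for illustration.

# Standalone sketch of the prepend-on-add rule used by _add above.
# `add_source` is a hypothetical helper; the entries are illustrative.

def add_source(sources, name, metadata_dir):
    if name in [s['name'] for s in sources]:
        raise RuntimeError(
            'a source named "{0}" already exists. Please choose a different name'
            .format(name)
        )
    # Insert the new source as the highest priority one.
    return [{'name': name, 'metadata': metadata_dir}] + sources


sources = [{'name': 'github-actions', 'metadata': '$spack/share/spack/bootstrap/github-actions'}]
sources = add_source(sources, 'local-binaries', '/mirrors/bootstrap/metadata/binaries')
assert sources[0]['name'] == 'local-binaries'  # highest priority wins
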
@@ -258,6 +444,9 @@ def bootstrap(parser, args):
         'root': _root,
         'list': _list,
         'trust': _trust,
-        'untrust': _untrust
+        'untrust': _untrust,
+        'add': _add,
+        'remove': _remove,
+        'mirror': _mirror
     }
     callbacks[args.subcommand](args)

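`bootstrap()` itself stays a thin dispatch table: each subcommand name maps to one of the private callbacks, and the new subcommands only add entries. A minimal sketch of the same pattern, with stand-in handlers rather than the real Spack callbacks:

# Dispatch-table sketch mirroring bootstrap() above; handlers are stand-ins.

def _status(args):
    print('status', args)


def _add(args):
    print('add', args)


callbacks = {'status': _status, 'add': _add}


def bootstrap(subcommand, args):
    # An unknown subcommand raises KeyError; argparse normally prevents that.
    callbacks[subcommand](args)


bootstrap('status', {})
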
@@ -161,11 +161,6 @@ def setup_parser(subparser):
         help=('Check single spec from json or yaml file instead of release ' +
               'specs file'))
-
-    check.add_argument(
-        '--rebuild-on-error', default=False, action='store_true',
-        help="Default to rebuilding packages if errors are encountered " +
-             "during the process of checking whether rebuilding is needed")
 
     check.set_defaults(func=check_fn)
 
     # Download tarball and specfile

@@ -361,7 +356,7 @@ def list_fn(args):
     try:
         specs = bindist.update_cache_and_get_specs()
     except bindist.FetchCacheError as e:
-        tty.error(e)
+        tty.die(e)
 
     if not args.allarch:
         arch = spack.spec.Spec.default_arch()

@@ -430,7 +425,7 @@ def check_fn(args):
         sys.exit(0)
 
     sys.exit(bindist.check_specs_against_mirrors(
-        configured_mirrors, specs, args.output_file, args.rebuild_on_error))
+        configured_mirrors, specs, args.output_file))
 
 
 def download_fn(args):

@@ -483,11 +478,12 @@ def save_specfile_fn(args):
     if args.root_specfile:
         with open(args.root_specfile) as fd:
             root_spec_as_json = fd.read()
+        spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
     else:
         root_spec = Spec(args.root_spec)
         root_spec.concretize()
-        root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
-    spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
+        root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
+        spec_format = 'json'
     save_dependency_specfiles(
         root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
 
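The reshuffled `save_specfile_fn` now infers the format only for a user-supplied specfile and always emits JSON for freshly concretized specs. A tiny sketch of that rule; `infer_spec_format` is a hypothetical helper, not part of the change:

# Format-inference rule from save_specfile_fn above; helper name is made up.

def infer_spec_format(root_specfile):
    if root_specfile is not None:
        return 'yaml' if root_specfile.endswith('yaml') else 'json'
    return 'json'  # freshly concretized specs are always written as JSON


assert infer_spec_format('root.yaml') == 'yaml'
assert infer_spec_format('root.json') == 'json'
assert infer_spec_format(None) == 'json'
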
@@ -701,7 +697,7 @@ def update_index(mirror_url, update_keys=False):
 
 def update_index_fn(args):
     """Update a buildcache index."""
-    outdir = '.'
+    outdir = 'file://.'
     if args.mirror_url:
         outdir = args.mirror_url
 

@@ -12,11 +12,12 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.repo
+import spack.spec
 import spack.stage
 import spack.util.crypto
-from spack.package import preferred_version
+from spack.package_base import preferred_version
 from spack.util.naming import valid_fully_qualified_module_name
-from spack.version import Version, ver
+from spack.version import VersionBase, ver
 
 description = "checksum available versions of a package"
 section = "packaging"

@@ -54,7 +55,8 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")
 
     # Get the package we're going to generate checksums for
-    pkg = spack.repo.get(args.package)
+    pkg_cls = spack.repo.path.get_pkg_class(args.package)
+    pkg = pkg_cls(spack.spec.Spec(args.package))
 
     url_dict = {}
     versions = args.versions

@@ -65,7 +67,7 @@ def checksum(parser, args):
         remote_versions = None
         for version in versions:
             version = ver(version)
-            if not isinstance(version, Version):
+            if not isinstance(version, VersionBase):
                 tty.die("Cannot generate checksums for version lists or "
                         "version ranges. Use unambiguous versions.")
             url = pkg.find_valid_url_for_version(version)

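The switch from `Version` to `VersionBase` matters because `ver()` can hand back a single version or a range/list, and only single, unambiguous versions can be checksummed. A rough standalone analogue of that guard, with stand-in types (the real ones live in spack.version):

# Stand-in analogue of the isinstance guard in checksum() above.
# VersionBase / VersionRange here are illustrative, not Spack's classes.

class VersionBase:
    def __init__(self, s):
        self.s = s


class VersionRange:  # e.g. '1.2:1.4' is a range, not checksummable
    def __init__(self, s):
        self.s = s


def parse_version(s):
    return VersionRange(s) if ':' in s else VersionBase(s)


for text in ('1.2.3', '1.2:1.4'):
    v = parse_version(text)
    if not isinstance(v, VersionBase):
        print('Cannot generate checksums for version lists or ranges:', text)
    else:
        print('ok:', text)
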
@@ -64,6 +64,11 @@ def setup_parser(subparser):
         '--dependencies', action='store_true', default=False,
         help="(Experimental) disable DAG scheduling; use "
              ' "plain" dependencies.')
+    generate.add_argument(
+        '--buildcache-destination', default=None,
+        help="Override the mirror configured in the environment (spack.yaml) " +
+             "in order to push binaries from the generated pipeline to a " +
+             "different location.")
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(
         '--prune-dag', action='store_true', dest='prune_dag',

@@ -127,6 +132,7 @@ def ci_generate(args):
     prune_dag = args.prune_dag
     index_only = args.index_only
     artifacts_root = args.artifacts_root
+    buildcache_destination = args.buildcache_destination
 
     if not output_file:
         output_file = os.path.abspath(".gitlab-ci.yml")

@@ -140,7 +146,8 @@ def ci_generate(args):
     spack_ci.generate_gitlab_ci_yaml(
         env, True, output_file, prune_dag=prune_dag,
         check_index_only=index_only, run_optimizer=run_optimizer,
-        use_dependencies=use_dependencies, artifacts_root=artifacts_root)
+        use_dependencies=use_dependencies, artifacts_root=artifacts_root,
+        remote_mirror_override=buildcache_destination)
 
     if copy_yaml_to:
         copy_to_dir = os.path.dirname(copy_yaml_to)

@@ -167,8 +174,7 @@ def ci_reindex(args):
 
 def ci_rebuild(args):
     """Check a single spec against the remote mirror, and rebuild it from
-    source if the mirror does not contain the full hash match of the spec
-    as computed locally. """
+    source if the mirror does not contain the hash. """
     env = spack.cmd.require_active_env(cmd_name='ci rebuild')
 
     # Make sure the environment is "gitlab-enabled", or else there's nothing

@@ -181,6 +187,9 @@ def ci_rebuild(args):
     if not gitlab_ci:
         tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
 
+    tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
+        os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))
+
     # Grab the environment variables we need. These either come from the
     # pipeline generation step ("spack ci generate"), where they were written
     # out as variables, or else provided by GitLab itself.

@@ -197,7 +206,7 @@ def ci_rebuild(args):
     compiler_action = get_env_var('SPACK_COMPILER_ACTION')
     cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
     spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
-    pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
+    remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
     remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
 
     # Construct absolute paths relative to current $CI_PROJECT_DIR

@@ -245,6 +254,10 @@ def ci_rebuild(args):
     tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
         spack_is_pr_pipeline, spack_is_develop_pipeline))
 
+    # If no override url exists, then just push binary package to the
+    # normal remote mirror url.
+    buildcache_mirror_url = remote_mirror_override or remote_mirror_url
+
     # Figure out what is our temporary storage mirror: Is it artifacts
     # buildcache? Or temporary-storage-url-prefix? In some cases we need to
     # force something or pipelines might not have a way to propagate build

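The new `or` fallback above relies on falsy values: an unset or empty override falls through to the environment's normal remote mirror. A sketch with illustrative values (`get_env_var` in this file reads GitLab CI variables; `os.environ.get` stands in here):

# Override-or-default selection, as in ci_rebuild above; values illustrative.
import os

remote_mirror_url = 's3://example-bucket/develop'  # stand-in environment mirror
remote_mirror_override = os.environ.get('SPACK_REMOTE_MIRROR_OVERRIDE')

# None or '' both fall through to the normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
print(buildcache_mirror_url)
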
@@ -280,8 +293,8 @@ def ci_rebuild(args):
         env, root_spec, job_spec_pkg_name, compiler_action)
     job_spec = spec_map[job_spec_pkg_name]
 
-    job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
-    job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)
+    job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
+    job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
 
     # To provide logs, cdash reports, etc for developer download/perusal,
     # these things have to be put into artifacts. This means downstream

@@ -335,23 +348,23 @@ def ci_rebuild(args):
     # using a compiler already installed on the target system).
     spack_ci.configure_compilers(compiler_action)
 
-    # Write this job's spec yaml into the reproduction directory, and it will
+    # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
-    tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
-    with open(job_spec_yaml_path, 'w') as fd:
-        fd.write(job_spec.to_yaml(hash=ht.build_hash))
+    tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
+    with open(job_spec_json_path, 'w') as fd:
+        fd.write(job_spec.to_json(hash=ht.dag_hash))
 
-    # Write the concrete root spec yaml into the reproduction directory
-    root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
-    with open(root_spec_yaml_path, 'w') as fd:
-        fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
+    # Write the concrete root spec json into the reproduction directory
+    root_spec_json_path = os.path.join(repro_dir, 'root.json')
+    with open(root_spec_json_path, 'w') as fd:
+        fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
 
     # Write some other details to aid in reproduction into an artifact
     repro_file = os.path.join(repro_dir, 'repro.json')
     repro_details = {
         'job_name': ci_job_name,
-        'job_spec_yaml': job_spec_yaml_file,
-        'root_spec_yaml': 'root.yaml',
+        'job_spec_json': job_spec_json_file,
+        'root_spec_json': 'root.json',
         'ci_project_dir': ci_project_dir
     }
     with open(repro_file, 'w') as fd:

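With the yaml-to-json migration above, every reproduction artifact is plain JSON. A minimal round-trip sketch of the `repro.json` payload; the field values and directory below are illustrative, not taken from a real pipeline:

# Round-trip sketch of the repro.json artifact written above.
import json
import os
import tempfile

repro_dir = tempfile.mkdtemp()  # stand-in for the pipeline's repro_dir
repro_details = {
    'job_name': 'gcc@11.2.0 /abcdef',   # illustrative
    'job_spec_json': 'gcc.json',
    'root_spec_json': 'root.json',
    'ci_project_dir': '/builds/spack/spack',
}
repro_file = os.path.join(repro_dir, 'repro.json')
with open(repro_file, 'w') as fd:
    fd.write(json.dumps(repro_details))

with open(repro_file) as fd:
    assert json.load(fd)['root_spec_json'] == 'root.json'
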
@@ -366,25 +379,41 @@ def ci_rebuild(args):
         fd.write(b'\n')
 
     # If we decided there should be a temporary storage mechanism, add that
-    # mirror now so it's used when we check for a full hash match already
+    # mirror now so it's used when we check for a hash match already
     # built for this spec.
     if pipeline_mirror_url:
         spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
                          pipeline_mirror_url,
                          cfg.default_modify_scope())
 
-    # Check configured mirrors for a built spec with a matching full hash
+    # Check configured mirrors for a built spec with a matching hash
+    mirrors_to_check = None
+    if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
+        # Passing "mirrors_to_check" below means we *only* look in the override
+        # mirror to see if we should skip building, which is what we want.
+        mirrors_to_check = {
+            'override': remote_mirror_override
+        }
+
+        # Adding this mirror to the list of configured mirrors means dependencies
+        # could be installed from either the override mirror or any other configured
+        # mirror (e.g. remote_mirror_url which is defined in the environment or
+        # pipeline_mirror_url), which is also what we want.
+        spack.mirror.add('mirror_override',
+                         remote_mirror_override,
+                         cfg.default_modify_scope())
+
     matches = bindist.get_mirrors_for_spec(
-        job_spec, full_hash_match=True, index_only=False)
+        job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
 
     if matches:
-        # Got a full hash match on at least one configured mirror.  All
+        # Got a hash match on at least one configured mirror.  All
         # matches represent the fully up-to-date spec, so should all be
         # equivalent.  If artifacts mirror is enabled, we just pick one
         # of the matches and download the buildcache files from there to
         # the artifacts, so they're available to be used by dependent
         # jobs in subsequent stages.
-        tty.msg('No need to rebuild {0}, found full hash match at: '.format(
+        tty.msg('No need to rebuild {0}, found hash match at: '.format(
             job_spec_pkg_name))
         for match in matches:
             tty.msg('  {0}'.format(match['mirror_url']))

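The override handling above is deliberately two-pronged: the rebuild check consults only the override mirror, while dependency installs may still pull from every configured mirror. A pure-Python stand-in for that selection logic, not the spack.mirror API:

# Stand-in for the mirrors_to_check logic above; all values illustrative.

configured_mirrors = {'env': 's3://example/develop'}
remote_mirror_override = 's3://example/protected'
spack_pipeline_type = 'spack_protected_branch'

mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
    # Only the override mirror decides whether we can skip a rebuild.
    mirrors_to_check = {'override': remote_mirror_override}
    # But the override mirror also joins the pool used for installing deps.
    configured_mirrors['mirror_override'] = remote_mirror_override

lookup = mirrors_to_check or configured_mirrors
print('rebuild check consults:', list(lookup))
print('installs may use:', list(configured_mirrors))
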
@@ -403,7 +432,7 @@ def ci_rebuild(args):
         # Now we are done and successful
         sys.exit(0)
 
-    # No full hash match anywhere means we need to rebuild spec
+    # No hash match anywhere means we need to rebuild spec
 
     # Start with spack arguments
     install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]

@@ -415,7 +444,6 @@ def ci_rebuild(args):
     install_args.extend([
         'install',
         '--keep-stage',
-        '--require-full-hash-match',
     ])
 
     can_verify = spack_ci.can_verify_binaries()

@@ -443,8 +471,8 @@ def ci_rebuild(args):
 
     # TODO: once we have the concrete spec registry, use the DAG hash
     # to identify the spec to install, rather than the concrete spec
-    # yaml file.
-    install_args.extend(['-f', job_spec_yaml_path])
+    # json file.
+    install_args.extend(['-f', job_spec_json_path])
 
     tty.debug('Installing {0} from source'.format(job_spec.name))
     tty.debug('spack install arguments: {0}'.format(

@@ -477,13 +505,13 @@ def ci_rebuild(args):
     tty.debug('spack install exited {0}'.format(install_exit_code))
 
     # If a spec fails to build in a spack develop pipeline, we add it to a
-    # list of known broken full hashes.  This allows spack PR pipelines to
+    # list of known broken hashes.  This allows spack PR pipelines to
     # avoid wasting compute cycles attempting to build those hashes.
     if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
         tty.debug('Install failed on develop')
         if 'broken-specs-url' in gitlab_ci:
             broken_specs_url = gitlab_ci['broken-specs-url']
-            dev_fail_hash = job_spec.full_hash()
+            dev_fail_hash = job_spec.dag_hash()
             broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
             tty.msg('Reporting broken develop build as: {0}'.format(
                 broken_spec_path))

@@ -494,7 +522,7 @@ def ci_rebuild(args):
                 'broken-spec': {
                     'job-url': get_env_var('CI_JOB_URL'),
                     'pipeline-url': get_env_var('CI_PIPELINE_URL'),
-                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
+                    'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
                 }
             }
 

@@ -520,13 +548,6 @@ def ci_rebuild(args):
     # any logs from the staging directory to artifacts now
     spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
 
-    # Create buildcache on remote mirror, either on pr-specific mirror or
-    # on the main mirror defined in the gitlab-enabled spack environment
-    if spack_is_pr_pipeline:
-        buildcache_mirror_url = pr_mirror_url
-    else:
-        buildcache_mirror_url = remote_mirror_url
-
     # If the install succeeded, create a buildcache entry for this job spec
     # and push it to one or more mirrors.  If the install did not succeed,
     # print out some instructions on how to reproduce this build failure

@@ -539,7 +560,7 @@ def ci_rebuild(args):
     # per-PR mirror, if this is a PR pipeline
     if buildcache_mirror_url:
         spack_ci.push_mirror_contents(
-            env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
+            env, job_spec_json_path, buildcache_mirror_url, sign_binaries
         )
 
     # Create another copy of that buildcache in the per-pipeline

@@ -548,14 +569,14 @@ def ci_rebuild(args):
     # prefix is set)
     if pipeline_mirror_url:
         spack_ci.push_mirror_contents(
-            env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
+            env, job_spec_json_path, pipeline_mirror_url, sign_binaries
         )
 
     # If this is a develop pipeline, check if the spec that we just built is
     # on the broken-specs list.  If so, remove it.
     if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
         broken_specs_url = gitlab_ci['broken-specs-url']
-        just_built_hash = job_spec.full_hash()
+        just_built_hash = job_spec.dag_hash()
         broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
         if web_util.url_exists(broken_spec_path):
             tty.msg('Removing {0} from the list of broken specs'.format(

@@ -58,6 +58,21 @@ def setup_parser(subparser):
     arguments.add_common_arguments(subparser, ['specs'])
 
 
+def remove_python_cache():
+    for directory in [lib_path, var_path]:
+        for root, dirs, files in os.walk(directory):
+            for f in files:
+                if f.endswith('.pyc') or f.endswith('.pyo'):
+                    fname = os.path.join(root, f)
+                    tty.debug('Removing {0}'.format(fname))
+                    os.remove(fname)
+            for d in dirs:
+                if d == '__pycache__':
+                    dname = os.path.join(root, d)
+                    tty.debug('Removing {0}'.format(dname))
+                    shutil.rmtree(dname)
+
+
 def clean(parser, args):
     # If nothing was set, activate the default
     if not any([args.specs, args.stage, args.downloads, args.failures,

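The cache-removal walk is now a reusable `remove_python_cache()`. Below is a self-contained exercise of the same walk against throwaway temp directories standing in for Spack's `lib_path`/`var_path`:

# Self-contained exercise of the extracted helper's walk; lib_path/var_path
# are replaced by temp directories so this runs anywhere.
import os
import shutil
import tempfile

lib_path = tempfile.mkdtemp()
var_path = tempfile.mkdtemp()
os.makedirs(os.path.join(lib_path, '__pycache__'))
open(os.path.join(lib_path, 'stale.pyc'), 'w').close()


def remove_python_cache():
    for directory in [lib_path, var_path]:
        for root, dirs, files in os.walk(directory):
            for f in files:
                if f.endswith('.pyc') or f.endswith('.pyo'):
                    os.remove(os.path.join(root, f))
            for d in dirs:
                if d == '__pycache__':
                    shutil.rmtree(os.path.join(root, d))


remove_python_cache()
assert os.listdir(lib_path) == []  # both the .pyc and __pycache__ are gone
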
@@ -70,8 +85,7 @@ def clean(parser, args):
         for spec in specs:
             msg = 'Cleaning build stage [{0}]'
             tty.msg(msg.format(spec.short_spec))
-            package = spack.repo.get(spec)
-            package.do_clean()
+            spec.package.do_clean()
 
     if args.stage:
         tty.msg('Removing all temporary build stages')

@@ -95,18 +109,7 @@ def clean(parser, args):
 
     if args.python_cache:
         tty.msg('Removing python cache files')
-        for directory in [lib_path, var_path]:
-            for root, dirs, files in os.walk(directory):
-                for f in files:
-                    if f.endswith('.pyc') or f.endswith('.pyo'):
-                        fname = os.path.join(root, f)
-                        tty.debug('Removing {0}'.format(fname))
-                        os.remove(fname)
-                for d in dirs:
-                    if d == '__pycache__':
-                        dname = os.path.join(root, d)
-                        tty.debug('Removing {0}'.format(dname))
-                        shutil.rmtree(dname)
+        remove_python_cache()
 
     if args.bootstrap:
         bootstrap_prefix = spack.util.path.canonicalize_path(

@@ -403,4 +403,4 @@ def add_s3_connection_args(subparser, add_help):
         default=None)
     subparser.add_argument(
         '--s3-endpoint-url',
-        help="Access Token to use to connect to this S3 mirror")
+        help="Endpoint URL to use to connect to this S3 mirror")

@@ -18,6 +18,8 @@
 
 def setup_parser(subparser):
     arguments.add_common_arguments(subparser, ['clean', 'dirty'])
+    arguments.add_concretizer_args(subparser)
+
     subparser.add_argument(
         '--dump', metavar="FILE",
         help="dump a source-able environment to FILE"

@@ -22,6 +22,9 @@ def setup_parser(subparser):
         help="""Concretize with test dependencies. When 'root' is chosen, test
 dependencies are only added for the environment's root specs. When 'all' is
 chosen, test dependencies are enabled for all packages in the environment.""")
+    subparser.add_argument(
+        '-q', '--quiet', action='store_true',
+        help="Don't print concretized specs")
 
     spack.cmd.common.arguments.add_concretizer_args(subparser)
 
@@ -38,5 +41,6 @@ def concretize(parser, args):
 
     with env.write_transaction():
         concretized_specs = env.concretize(force=args.force, tests=tests)
-        ev.display_specs(concretized_specs)
+        if not args.quiet:
+            ev.display_specs(concretized_specs)
         env.write()

@@ -9,7 +9,6 @@
 
 import spack.container
 import spack.container.images
-import spack.monitor
 
 description = ("creates recipes to build images for different"
                " container runtimes")

@@ -18,7 +17,6 @@
 
 
 def setup_parser(subparser):
-    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
     subparser.add_argument(
         '--list-os', action='store_true', default=False,
         help='list all the OS that can be used in the bootstrap phase and exit'

@@ -46,14 +44,5 @@ def containerize(parser, args):
         raise ValueError(msg.format(config_file))
 
     config = spack.container.validate(config_file)
-
-    # If we have a monitor request, add monitor metadata to config
-    if args.use_monitor:
-        config['spack']['monitor'] = {
-            "host": args.monitor_host,
-            "keep_going": args.monitor_keep_going,
-            "prefix": args.monitor_prefix,
-            "tags": args.monitor_tags
-        }
     recipe = spack.container.recipe(config, last_phase=args.last_stage)
     print(recipe)

|||||||
@@ -57,7 +57,7 @@
|
|||||||
# See the Spack documentation for more information on packaging.
|
# See the Spack documentation for more information on packaging.
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
|
|
||||||
from spack import *
|
from spack.package import *
|
||||||
|
|
||||||
|
|
||||||
class {class_name}({base_class_name}):
|
class {class_name}({base_class_name}):
|
||||||
@@ -187,6 +187,27 @@ def cmake_args(self):
         return args"""
 
 
+class LuaPackageTemplate(PackageTemplate):
+    """Provides appropriate overrides for LuaRocks-based packages"""
+
+    base_class_name = 'LuaPackage'
+
+    body_def = """\
+    def luarocks_args(self):
+        # FIXME: Add arguments to `luarocks make` other than rockspec path
+        # FIXME: If not needed delete this function
+        args = []
+        return args"""
+
+    def __init__(self, name, url, *args, **kwargs):
+        # If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
+        if not name.startswith('lua-'):
+            # Make it more obvious that we are renaming the package
+            tty.msg("Changing package name from {0} to lua-{0}".format(name))
+            name = 'lua-{0}'.format(name)
+        super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
+
+
 class MesonPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for meson-based packages"""
 
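The renaming rule in `LuaPackageTemplate.__init__` is small enough to state as a pure function. A sketch of just that rule; `lua_package_name` is a hypothetical name:

# Rename rule from LuaPackageTemplate above: prefix once, never twice.

def lua_package_name(name):
    if not name.startswith('lua-'):
        name = 'lua-{0}'.format(name)
    return name


assert lua_package_name('lpeg') == 'lua-lpeg'
assert lua_package_name('lua-lpeg') == 'lua-lpeg'
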
@@ -580,6 +601,7 @@ def __init__(self, name, *args, **kwargs):
     'makefile': MakefilePackageTemplate,
     'intel': IntelPackageTemplate,
     'meson': MesonPackageTemplate,
+    'lua': LuaPackageTemplate,
     'sip': SIPPackageTemplate,
     'generic': PackageTemplate,
 }

@@ -644,6 +666,9 @@ def __call__(self, stage, url):
         if url.endswith('.whl') or '.whl#' in url:
             self.build_system = 'python'
             return
+        if url.endswith('.rock'):
+            self.build_system = 'lua'
+            return
 
         # A list of clues that give us an idea of the build system a package
         # uses. If the regular expression matches a file contained in the

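With the `.rock` branch added, the guesser short-circuits on URL extension before falling back to file-pattern clues. A hypothetical standalone condensation of those early checks:

# Condensed form of the early URL checks above; `guess_from_url` is made up.

def guess_from_url(url):
    if url.endswith('.whl') or '.whl#' in url:
        return 'python'
    if url.endswith('.rock'):
        return 'lua'
    return None  # fall through to the regex clues over staged files


assert guess_from_url('https://example.com/lpeg-1.0.0-1.src.rock') == 'lua'
assert guess_from_url('https://example.com/pkg-1.0-py3-none-any.whl') == 'python'
assert guess_from_url('https://example.com/pkg-1.0.tar.gz') is None
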
@@ -668,6 +693,7 @@ def __call__(self, stage, url):
             (r'/Rakefile$', 'ruby'),
             (r'/setup\.rb$', 'ruby'),
             (r'/.*\.pro$', 'qmake'),
+            (r'/.*\.rockspec$', 'lua'),
             (r'/(GNU)?[Mm]akefile$', 'makefile'),
             (r'/DESCRIPTION$', 'octave'),
             (r'/meson\.build$', 'meson'),

@@ -800,7 +826,7 @@ def get_versions(args, name):
             spack.util.url.require_url_format(args.url)
             if args.url.startswith('file://'):
                 valid_url = False  # No point in spidering these
-        except ValueError:
+        except (ValueError, TypeError):
             valid_url = False
 
     if args.url is not None and args.template != 'bundle' and valid_url:

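Broadening `except ValueError` to include `TypeError` presumably covers non-string URLs reaching regex-based validation; for example, `re.search` raises `TypeError` when handed `None`:

# Why TypeError can surface from URL validation on a missing url.
import re

try:
    re.search(r'\.(tar\.gz|tgz|zip)$', None)
except (ValueError, TypeError) as e:
    print('caught:', type(e).__name__)   # caught: TypeError
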
|||||||
@@ -11,7 +11,7 @@
|
|||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.cmd.common.arguments as arguments
|
import spack.cmd.common.arguments as arguments
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.package
|
import spack.package_base
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.store
|
import spack.store
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ def dependencies(parser, args):
 
     else:
         spec = specs[0]
-        dependencies = spack.package.possible_dependencies(
+        dependencies = spack.package_base.possible_dependencies(
             spec,
             transitive=args.transitive,
             expand_virtuals=args.expand_virtuals,

|||||||
@@ -39,9 +39,9 @@ def inverted_dependencies():
|
|||||||
actual dependents.
|
actual dependents.
|
||||||
"""
|
"""
|
||||||
dag = {}
|
dag = {}
|
||||||
for pkg in spack.repo.path.all_packages():
|
for pkg_cls in spack.repo.path.all_package_classes():
|
||||||
dag.setdefault(pkg.name, set())
|
dag.setdefault(pkg_cls.name, set())
|
||||||
for dep in pkg.dependencies:
|
for dep in pkg_cls.dependencies:
|
||||||
deps = [dep]
|
deps = [dep]
|
||||||
|
|
||||||
# expand virtuals if necessary
|
# expand virtuals if necessary
|
||||||
@@ -49,7 +49,7 @@ def inverted_dependencies():
                 deps += [s.name for s in spack.repo.path.providers_for(dep)]
 
             for d in deps:
-                dag.setdefault(d, set()).add(pkg_cls.name)
+                dag.setdefault(d, set()).add(pkg_cls.name)
     return dag
 
 
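`inverted_dependencies()` builds a dependents map by flipping a dependency map. The same inversion on a tiny hand-written example (package names illustrative, standing in for the package repository):

# Inversion sketch matching inverted_dependencies() above.

dependencies = {
    'cmake': ['openssl', 'zlib'],   # illustrative package -> deps
    'openssl': ['zlib'],
    'zlib': [],
}

dag = {}
for pkg, deps in dependencies.items():
    dag.setdefault(pkg, set())
    for d in deps:
        dag.setdefault(d, set()).add(pkg)

assert dag['zlib'] == {'cmake', 'openssl'}   # zlib's dependents
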
|||||||
@@ -87,12 +87,10 @@ def dev_build(self, args):
|
|||||||
|
|
||||||
# Forces the build to run out of the source directory.
|
# Forces the build to run out of the source directory.
|
||||||
spec.constrain('dev_path=%s' % source_path)
|
spec.constrain('dev_path=%s' % source_path)
|
||||||
|
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
package = spack.repo.get(spec)
|
|
||||||
|
|
||||||
if package.installed:
|
if spec.installed:
|
||||||
tty.error("Already installed in %s" % package.prefix)
|
tty.error("Already installed in %s" % spec.prefix)
|
||||||
tty.msg("Uninstall or try adding a version suffix for this dev build.")
|
tty.msg("Uninstall or try adding a version suffix for this dev build.")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
@@ -109,7 +107,7 @@ def dev_build(self, args):
     elif args.test == 'root':
         tests = [spec.name for spec in specs]
 
-    package.do_install(
+    spec.package.do_install(
         tests=tests,
         make_jobs=args.jobs,
         keep_prefix=args.keep_prefix,

@@ -122,5 +120,5 @@ def dev_build(self, args):
 
     # drop into the build environment of the package?
     if args.shell is not None:
-        spack.build_environment.setup_package(package, dirty=False)
+        spack.build_environment.setup_package(spec.package, dirty=False)
         os.execvp(args.shell, [args.shell])

|||||||
@@ -54,8 +54,9 @@ def develop(parser, args):
|
|||||||
tty.msg(msg)
|
tty.msg(msg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
stage = spack.spec.Spec(entry['spec']).package.stage
|
spec = spack.spec.Spec(entry['spec'])
|
||||||
stage.steal_source(abspath)
|
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||||
|
pkg_cls(spec).stage.steal_source(abspath)
|
||||||
|
|
||||||
if not env.dev_specs:
|
if not env.dev_specs:
|
||||||
tty.warn("No develop specs to download")
|
tty.warn("No develop specs to download")
|
||||||
|
|||||||
@@ -68,8 +68,14 @@ def compare_specs(a, b, to_string=False, color=None):
|
|||||||
# Prepare a solver setup to parse differences
|
# Prepare a solver setup to parse differences
|
||||||
setup = asp.SpackSolverSetup()
|
setup = asp.SpackSolverSetup()
|
||||||
|
|
||||||
a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
|
# get facts for specs, making sure to include build dependencies of concrete
|
||||||
b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))
|
# specs and to descend into dependency hashes so we include all facts.
|
||||||
|
a_facts = set(t for t in setup.spec_clauses(
|
||||||
|
a, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||||
|
))
|
||||||
|
b_facts = set(t for t in setup.spec_clauses(
|
||||||
|
b, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||||
|
))
|
||||||
|
|
||||||
# We want to present them to the user as simple key: values
|
# We want to present them to the user as simple key: values
|
||||||
intersect = sorted(a_facts.intersection(b_facts))
|
intersect = sorted(a_facts.intersection(b_facts))
|
||||||
|
|||||||
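`spack diff` then reduces to set arithmetic over these fact tuples: the intersection is what the two specs share, and the differences are what is unique to each side. A sketch with made-up clause tuples standing in for the solver facts produced by `spec_clauses` above:

# Set arithmetic behind the spec comparison; tuples are illustrative facts.

a_facts = {('version', 'zlib', '1.2.11'), ('variant_value', 'zlib', 'optimize', 'True')}
b_facts = {('version', 'zlib', '1.2.12'), ('variant_value', 'zlib', 'optimize', 'True')}

intersect = sorted(a_facts.intersection(b_facts))
a_not_b = sorted(a_facts.difference(b_facts))
b_not_a = sorted(b_facts.difference(a_facts))

print('shared:', intersect)
print('only in a:', a_not_b)
print('only in b:', b_not_a)
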
Some files were not shown because too many files have changed in this diff.