Compare commits
694 Commits
versions/g
...
features/r
Author | SHA1 | Date | |
---|---|---|---|
![]() |
771c4e4017 | ||
![]() |
9633145374 | ||
![]() |
29d96633ee | ||
![]() |
267da78559 | ||
![]() |
013a0a04a4 | ||
![]() |
a6a7c3ce88 | ||
![]() |
960b48b613 | ||
![]() |
84bb8e316d | ||
![]() |
010068c50a | ||
![]() |
a7ebdd2d10 | ||
![]() |
62553e7521 | ||
![]() |
f961a11187 | ||
![]() |
b00983439f | ||
![]() |
5fe4ac2b05 | ||
![]() |
c60ded8ccc | ||
![]() |
ce7c8b44d5 | ||
![]() |
4817e4c80c | ||
![]() |
111aeeb0ae | ||
![]() |
3a70df9f64 | ||
![]() |
616a8eebcd | ||
![]() |
ac9bd3d72d | ||
![]() |
7118602251 | ||
![]() |
7ebce6191a | ||
![]() |
03832a0b0f | ||
![]() |
b2534171be | ||
![]() |
4905a71d6d | ||
![]() |
0abc4564ae | ||
![]() |
13d05848ad | ||
![]() |
6f2f9653c5 | ||
![]() |
8556f83238 | ||
![]() |
a717c5df40 | ||
![]() |
3552cfac1a | ||
![]() |
b792e08c15 | ||
![]() |
d1d86d68f1 | ||
![]() |
974beb4ffc | ||
![]() |
b7f8899d45 | ||
![]() |
e47ec07ffc | ||
![]() |
778eee1579 | ||
![]() |
a43a633b2a | ||
![]() |
5e00dffe7f | ||
![]() |
7aa1fef506 | ||
![]() |
2d94624d0a | ||
![]() |
cc6cc4c7e5 | ||
![]() |
943d463a2d | ||
![]() |
53eb044b28 | ||
![]() |
4f3553ae7c | ||
![]() |
e59d7fcb9b | ||
![]() |
056ea79d2c | ||
![]() |
83533fc31f | ||
![]() |
0420c829c6 | ||
![]() |
c664a31f7b | ||
![]() |
a710a2425a | ||
![]() |
5e014be8b7 | ||
![]() |
abc162cf3b | ||
![]() |
dc3cf5c6b0 | ||
![]() |
75638c1e88 | ||
![]() |
fb5b78335b | ||
![]() |
63600c0d0d | ||
![]() |
beaf1c7fcc | ||
![]() |
2da7f40cbc | ||
![]() |
4c446d54b1 | ||
![]() |
4067a28182 | ||
![]() |
f0ceeac0a8 | ||
![]() |
2965a7a7e9 | ||
![]() |
3273928ddc | ||
![]() |
a4f32dbf8d | ||
![]() |
823b982ee1 | ||
![]() |
afbbbf9921 | ||
![]() |
68d32f7fdc | ||
![]() |
e13655b584 | ||
![]() |
78facf0761 | ||
![]() |
17c264b30a | ||
![]() |
634d51cec2 | ||
![]() |
bf1ef4bc37 | ||
![]() |
6b62a15ab6 | ||
![]() |
9a05a7a714 | ||
![]() |
1c13b3707c | ||
![]() |
bad6ebc910 | ||
![]() |
48122affbd | ||
![]() |
e72afede17 | ||
![]() |
0d2044c659 | ||
![]() |
a37dd23403 | ||
![]() |
d6483fe483 | ||
![]() |
be12fea11e | ||
![]() |
65cf6b0d5c | ||
![]() |
21ef70d807 | ||
![]() |
82a6d6f5d8 | ||
![]() |
a4bead73b1 | ||
![]() |
d4b68995c3 | ||
![]() |
0ce88521ce | ||
![]() |
aac7176924 | ||
![]() |
bc347f1f1c | ||
![]() |
6eac7acb93 | ||
![]() |
08ea458814 | ||
![]() |
b6c1143f3f | ||
![]() |
1cba551fe6 | ||
![]() |
326fa2392b | ||
![]() |
0c4a866c2e | ||
![]() |
54e9b6b8fb | ||
![]() |
a3a61241fc | ||
![]() |
7133838fa5 | ||
![]() |
8de726336f | ||
![]() |
3835ac0c41 | ||
![]() |
a405a6efe3 | ||
![]() |
b0c8affbd9 | ||
![]() |
03a7643816 | ||
![]() |
1ada151d80 | ||
![]() |
840858c215 | ||
![]() |
0f80a5a9d5 | ||
![]() |
c8f85ebea3 | ||
![]() |
3b8256ef6a | ||
![]() |
33d791bd1c | ||
![]() |
374ebe5840 | ||
![]() |
cf21fd837f | ||
![]() |
01609b3111 | ||
![]() |
fdc9a29a9b | ||
![]() |
0e0c438dd6 | ||
![]() |
015046cb97 | ||
![]() |
388d5fada3 | ||
![]() |
2d3a613128 | ||
![]() |
ce0346abd9 | ||
![]() |
00313f9e20 | ||
![]() |
6c03e1efb6 | ||
![]() |
b1de8a5680 | ||
![]() |
02a1f6f0f1 | ||
![]() |
79be21c8e4 | ||
![]() |
bb70e6ffcd | ||
![]() |
b09fb467f1 | ||
![]() |
dec3e31e60 | ||
![]() |
f5520de4bd | ||
![]() |
be99a72d57 | ||
![]() |
6f1a5c8fee | ||
![]() |
a482970a35 | ||
![]() |
bbb3f724af | ||
![]() |
89f6db21f1 | ||
![]() |
b28b24ccf8 | ||
![]() |
3dfa64edab | ||
![]() |
2046ca7722 | ||
![]() |
4f5d5bb2ef | ||
![]() |
34d9d58924 | ||
![]() |
0804637228 | ||
![]() |
a837008ff7 | ||
![]() |
1ffad2073d | ||
![]() |
c846b5149d | ||
![]() |
4c1250854a | ||
![]() |
fbd94b9ad2 | ||
![]() |
10a3822728 | ||
![]() |
586df30f9a | ||
![]() |
803900ddc5 | ||
![]() |
0480afe29d | ||
![]() |
cb56e9b745 | ||
![]() |
6a8aa59717 | ||
![]() |
110ade1804 | ||
![]() |
5db279f986 | ||
![]() |
67a90a644f | ||
![]() |
fef84f2265 | ||
![]() |
b0e35dffa3 | ||
![]() |
a3520d14bd | ||
![]() |
7d534f38d6 | ||
![]() |
2aec5b65f3 | ||
![]() |
17e2fb0ef6 | ||
![]() |
ce09b42cdd | ||
![]() |
433d404a93 | ||
![]() |
8d56728984 | ||
![]() |
39b58abd9c | ||
![]() |
d7a6cb3e53 | ||
![]() |
fd724fb38d | ||
![]() |
2bb9eeac28 | ||
![]() |
b7ce6a5ec1 | ||
![]() |
1999135cab | ||
![]() |
a708844e87 | ||
![]() |
4dd5bcb3ef | ||
![]() |
dc44642bfb | ||
![]() |
a9b4f33f23 | ||
![]() |
6e5cba7b82 | ||
![]() |
b45e734b59 | ||
![]() |
624077e629 | ||
![]() |
da0a7836f3 | ||
![]() |
271f0c024a | ||
![]() |
8deb50fea5 | ||
![]() |
b71661eaa6 | ||
![]() |
f110d0848a | ||
![]() |
b033dbbbd0 | ||
![]() |
1a999d6e2d | ||
![]() |
9740c3b300 | ||
![]() |
b9d12df7a9 | ||
![]() |
a9a7ac71fa | ||
![]() |
c8f37797a4 | ||
![]() |
6bdd9db594 | ||
![]() |
92531ca773 | ||
![]() |
495dd61218 | ||
![]() |
d0ad644ea6 | ||
![]() |
c2f470f4a6 | ||
![]() |
831346a3fb | ||
![]() |
ae76834f3d | ||
![]() |
714573cdbc | ||
![]() |
7d67577156 | ||
![]() |
7dc569ceeb | ||
![]() |
7475eba798 | ||
![]() |
25ae8c526b | ||
![]() |
c364a04a42 | ||
![]() |
f229f746db | ||
![]() |
7385ea5f01 | ||
![]() |
8e177a45d2 | ||
![]() |
b1b9ed298d | ||
![]() |
70f6496f08 | ||
![]() |
9bc5b0cad5 | ||
![]() |
1ea05cd456 | ||
![]() |
65763d18fb | ||
![]() |
796ce7fe84 | ||
![]() |
4564aa4549 | ||
![]() |
646a4bb26c | ||
![]() |
7893be7712 | ||
![]() |
3ad3e53ff8 | ||
![]() |
f6795ae46d | ||
![]() |
23e85f4086 | ||
![]() |
dfff935f17 | ||
![]() |
b662a5968b | ||
![]() |
99083f1706 | ||
![]() |
13f3bd533d | ||
![]() |
8c0b695f13 | ||
![]() |
79ba0c50c1 | ||
![]() |
b667be470e | ||
![]() |
99425e273b | ||
![]() |
dbf947599f | ||
![]() |
bc34bcfbc2 | ||
![]() |
08c73e456c | ||
![]() |
3a6ff4dd92 | ||
![]() |
012889bc40 | ||
![]() |
8ab6af9ed9 | ||
![]() |
170613ff6b | ||
![]() |
ff04d1bfc1 | ||
![]() |
48b222c36b | ||
![]() |
6058d52746 | ||
![]() |
5f996edde9 | ||
![]() |
ea8c9ae6b7 | ||
![]() |
b92a6d106b | ||
![]() |
2474609395 | ||
![]() |
dd6f4e680a | ||
![]() |
e73cf5df44 | ||
![]() |
544486538c | ||
![]() |
f5b1d513e1 | ||
![]() |
fa6f1336c1 | ||
![]() |
b2eda32e55 | ||
![]() |
ff33978b0d | ||
![]() |
43577beb9c | ||
![]() |
656074a96d | ||
![]() |
cd0b6b9088 | ||
![]() |
57f5c92a39 | ||
![]() |
7477161a05 | ||
![]() |
5283ca5781 | ||
![]() |
beff697cc0 | ||
![]() |
0dc3c85a90 | ||
![]() |
558f7f007e | ||
![]() |
a717ba9b87 | ||
![]() |
419c2ecb95 | ||
![]() |
532182db52 | ||
![]() |
bd3e86ee28 | ||
![]() |
7613b8a3bc | ||
![]() |
49f9a2136d | ||
![]() |
57e2ea1ac4 | ||
![]() |
2816d28e90 | ||
![]() |
da6d9a33c1 | ||
![]() |
c8728a1526 | ||
![]() |
24800c3de5 | ||
![]() |
e520d77760 | ||
![]() |
90c4b43274 | ||
![]() |
2d6ba6663a | ||
![]() |
764a97d428 | ||
![]() |
69b3a88fa3 | ||
![]() |
6a3c0825e3 | ||
![]() |
935faeb0c0 | ||
![]() |
16c5091026 | ||
![]() |
b2717a8abf | ||
![]() |
4b3f6cede8 | ||
![]() |
f2fc4ee9af | ||
![]() |
d64de54ebe | ||
![]() |
b7eb4af98f | ||
![]() |
075457db00 | ||
![]() |
88d8ca9b65 | ||
![]() |
8ddaa08ed2 | ||
![]() |
6d42a1e0f0 | ||
![]() |
36c0a1eec3 | ||
![]() |
d7aac3af24 | ||
![]() |
5680c90c7d | ||
![]() |
b9cdc7ed55 | ||
![]() |
fbf50fbfa8 | ||
![]() |
a3de8eb8bb | ||
![]() |
a3c3f9f98b | ||
![]() |
a24306dad7 | ||
![]() |
fc00119e52 | ||
![]() |
1e437bbc66 | ||
![]() |
d04f6e47ff | ||
![]() |
ee505e6c69 | ||
![]() |
d61e54b4c7 | ||
![]() |
443db0b8c3 | ||
![]() |
105b42c15f | ||
![]() |
9cc45f66b7 | ||
![]() |
28a9da9130 | ||
![]() |
21cb859b4f | ||
![]() |
dc7fbafdb2 | ||
![]() |
d5a78d49fe | ||
![]() |
8ef299ed16 | ||
![]() |
79bc2949ce | ||
![]() |
542f46065a | ||
![]() |
c7b9354810 | ||
![]() |
a94e47743f | ||
![]() |
bfb846a513 | ||
![]() |
14c5d26c48 | ||
![]() |
dfc6c2a437 | ||
![]() |
5874d1fbd8 | ||
![]() |
a53ffbd621 | ||
![]() |
54491a5d96 | ||
![]() |
f12c3fcd55 | ||
![]() |
29da99427e | ||
![]() |
a58fa289b9 | ||
![]() |
ea61a657ae | ||
![]() |
56695bb09c | ||
![]() |
6237d54936 | ||
![]() |
a7b91321e5 | ||
![]() |
9dac0dea25 | ||
![]() |
9a977cb694 | ||
![]() |
88b1bf751d | ||
![]() |
90da5dc909 | ||
![]() |
44263b7226 | ||
![]() |
1ecb19894c | ||
![]() |
81e9e47337 | ||
![]() |
cbba0ffc0c | ||
![]() |
f20db5fc61 | ||
![]() |
40154d83ae | ||
![]() |
5c5da19b0c | ||
![]() |
75d692eed3 | ||
![]() |
f6f89677a7 | ||
![]() |
26bcbc521e | ||
![]() |
c99b3c31db | ||
![]() |
768c35dd23 | ||
![]() |
c9054a66af | ||
![]() |
5d47347d8c | ||
![]() |
f136f62407 | ||
![]() |
63b981dba4 | ||
![]() |
685e3d7ae9 | ||
![]() |
f97be99f2a | ||
![]() |
1eb61dd9f3 | ||
![]() |
4571f4c994 | ||
![]() |
20000493f3 | ||
![]() |
36fcf461d9 | ||
![]() |
58a32b04d9 | ||
![]() |
d13c1cfa9f | ||
![]() |
a77ce04005 | ||
![]() |
cd6e70f539 | ||
![]() |
1b067ee3e6 | ||
![]() |
cd2b2898a1 | ||
![]() |
4c437a8683 | ||
![]() |
4898759eab | ||
![]() |
295507129b | ||
![]() |
eba0997521 | ||
![]() |
acb982f7fa | ||
![]() |
ef1e96def4 | ||
![]() |
fdab17a4d4 | ||
![]() |
8806f9f826 | ||
![]() |
54d738ff9c | ||
![]() |
e58ac0705d | ||
![]() |
0182e39d44 | ||
![]() |
7cfe626e21 | ||
![]() |
0367b73ed5 | ||
![]() |
f66e7aea94 | ||
![]() |
ef23c3b122 | ||
![]() |
078ee48c4b | ||
![]() |
9516fa9447 | ||
![]() |
c0b400c422 | ||
![]() |
134debc518 | ||
![]() |
ed1a48f50a | ||
![]() |
02cbb8ffaa | ||
![]() |
fd055d4678 | ||
![]() |
40fad1472a | ||
![]() |
00469d84e7 | ||
![]() |
1c2fff2cb7 | ||
![]() |
5424e4bdc1 | ||
![]() |
5fe20b462c | ||
![]() |
2856b29740 | ||
![]() |
a7e365536b | ||
![]() |
6aedbb32da | ||
![]() |
9e169ae243 | ||
![]() |
1dbab78db1 | ||
![]() |
8228ac7d62 | ||
![]() |
3f41f8d093 | ||
![]() |
1ac7f2b02b | ||
![]() |
26f94b24e8 | ||
![]() |
c75fb4da20 | ||
![]() |
cd00eba9d6 | ||
![]() |
073330e893 | ||
![]() |
bb9ac2be9c | ||
![]() |
b1f38f6fa2 | ||
![]() |
8e4e69e91d | ||
![]() |
a785eeb896 | ||
![]() |
1b41e7408d | ||
![]() |
15bdc6b3e1 | ||
![]() |
56717e8b85 | ||
![]() |
048a0de35b | ||
![]() |
4702b49094 | ||
![]() |
0168047429 | ||
![]() |
0a3d496db1 | ||
![]() |
e22fbdb6b8 | ||
![]() |
f8201f4acc | ||
![]() |
27e57c444e | ||
![]() |
1b55057f36 | ||
![]() |
f8224f284c | ||
![]() |
1a728c98ff | ||
![]() |
59e522e815 | ||
![]() |
011a8b3f3e | ||
![]() |
20255b6161 | ||
![]() |
34732c57b3 | ||
![]() |
c300b92047 | ||
![]() |
f41c3a0fe9 | ||
![]() |
773da7ceba | ||
![]() |
487b1c3690 | ||
![]() |
5afdccba06 | ||
![]() |
49498af9ec | ||
![]() |
5df10c04cd | ||
![]() |
8f89932aad | ||
![]() |
b936168224 | ||
![]() |
80195bd1ed | ||
![]() |
eda5b854a5 | ||
![]() |
6760175289 | ||
![]() |
a14901f792 | ||
![]() |
9177f223c3 | ||
![]() |
ad6d28d9ad | ||
![]() |
26ff44388f | ||
![]() |
075c76128a | ||
![]() |
226004b960 | ||
![]() |
3448a4642a | ||
![]() |
dae656405f | ||
![]() |
3c0e422509 | ||
![]() |
b08f147cf3 | ||
![]() |
15dbc86177 | ||
![]() |
2f3a31cef6 | ||
![]() |
fa554d763e | ||
![]() |
b4b8aaf3bc | ||
![]() |
c4b3b9ed9a | ||
![]() |
674030ce62 | ||
![]() |
0b7eca36d1 | ||
![]() |
3c1f87099b | ||
![]() |
6d0b59b7ea | ||
![]() |
dba7a03daa | ||
![]() |
560abdc46d | ||
![]() |
9c2465173e | ||
![]() |
06a9496a39 | ||
![]() |
8fcfde62df | ||
![]() |
b65054881f | ||
![]() |
6b6f67e90f | ||
![]() |
e485d16c62 | ||
![]() |
18f71aef1e | ||
![]() |
ef0bc71035 | ||
![]() |
b4ab4ddc27 | ||
![]() |
f2b123760c | ||
![]() |
db5bd7134a | ||
![]() |
26a62a3258 | ||
![]() |
732f1dd14d | ||
![]() |
e05f453531 | ||
![]() |
d69e8dbace | ||
![]() |
3ec27332ec | ||
![]() |
311dc67536 | ||
![]() |
1b849aaa2c | ||
![]() |
b67c7e593b | ||
![]() |
daaac3e2f0 | ||
![]() |
c7f9431e1b | ||
![]() |
b0a712097a | ||
![]() |
558e99ecca | ||
![]() |
12ed453707 | ||
![]() |
8fdc2ff462 | ||
![]() |
cf3cfab86c | ||
![]() |
8f5b9a89fb | ||
![]() |
6d1da528f7 | ||
![]() |
6611f0bf4b | ||
![]() |
3d6156d3b2 | ||
![]() |
c6b69a8244 | ||
![]() |
d349677dcb | ||
![]() |
b5dc0c63ea | ||
![]() |
048b5ad2d0 | ||
![]() |
9b0d5cbabf | ||
![]() |
26552533be | ||
![]() |
4b0cddb674 | ||
![]() |
c3a1dd0bdb | ||
![]() |
bfb6873ce3 | ||
![]() |
71c9334e30 | ||
![]() |
8c508b530b | ||
![]() |
7f3a63aaa6 | ||
![]() |
38cce0355c | ||
![]() |
892aa0fd16 | ||
![]() |
a627684115 | ||
![]() |
9c780ac413 | ||
![]() |
8537634283 | ||
![]() |
3ffe9a27b2 | ||
![]() |
9e01e17dc6 | ||
![]() |
531b1c5c3d | ||
![]() |
be0d611dc1 | ||
![]() |
e0e3443f3a | ||
![]() |
56ba61c52f | ||
![]() |
dd67f46c6b | ||
![]() |
62fcf407e2 | ||
![]() |
2fa495154e | ||
![]() |
0ce8b9d398 | ||
![]() |
f8f4aafe81 | ||
![]() |
ea13122f3c | ||
![]() |
282a366fc4 | ||
![]() |
5a72204d38 | ||
![]() |
27f608c800 | ||
![]() |
5a85b181d3 | ||
![]() |
99c1f9b987 | ||
![]() |
4aee27816e | ||
![]() |
e63b4f752a | ||
![]() |
cf1349ba35 | ||
![]() |
e4d4a5193f | ||
![]() |
df4129d395 | ||
![]() |
06aef626cb | ||
![]() |
31daf0f2b6 | ||
![]() |
3a994032f8 | ||
![]() |
90c773488c | ||
![]() |
e65d3d14b4 | ||
![]() |
4a73bfc3b9 | ||
![]() |
f8782c46d7 | ||
![]() |
15ef85e161 | ||
![]() |
012758c179 | ||
![]() |
a7101db39d | ||
![]() |
f587a9ce68 | ||
![]() |
a0164793cb | ||
![]() |
d4d101f57e | ||
![]() |
fb0e91c534 | ||
![]() |
a7de2fa380 | ||
![]() |
b60a0eea01 | ||
![]() |
81bc00d61f | ||
![]() |
b26b3bebb4 | ||
![]() |
e174623edf | ||
![]() |
55544950e2 | ||
![]() |
3544b0274f | ||
![]() |
9ba41449a9 | ||
![]() |
5fd05a8cbe | ||
![]() |
5f3ab83e2d | ||
![]() |
fa9894ae16 | ||
![]() |
3648adc2a7 | ||
![]() |
4ef534b19b | ||
![]() |
73287df7c5 | ||
![]() |
993ce4c095 | ||
![]() |
4b9f216c11 | ||
![]() |
508e3e03b9 | ||
![]() |
9b0c2cb954 | ||
![]() |
ea93b1a03b | ||
![]() |
d3131cd2da | ||
![]() |
3a5ae0fb66 | ||
![]() |
94794d061a | ||
![]() |
1250fac467 | ||
![]() |
8c59736ff9 | ||
![]() |
825fed2ffa | ||
![]() |
be58d354ac | ||
![]() |
4176ce5c07 | ||
![]() |
8409443b1f | ||
![]() |
aee763015e | ||
![]() |
50b5b314b3 | ||
![]() |
8a754ad502 | ||
![]() |
7f03724db2 | ||
![]() |
ed359cb00b | ||
![]() |
f5a2d87eb2 | ||
![]() |
c3b6b3036d | ||
![]() |
f1bdbefd46 | ||
![]() |
f14840fcf2 | ||
![]() |
2e3bea8e17 | ||
![]() |
474493713b | ||
![]() |
ccfaec7b1c | ||
![]() |
4635c813e0 | ||
![]() |
a4f9d4fa4a | ||
![]() |
32281ee40e | ||
![]() |
8ab3b56e00 | ||
![]() |
f93107fff3 | ||
![]() |
a6eed4a7c7 | ||
![]() |
406939f485 | ||
![]() |
a649ee9b69 | ||
![]() |
0a9e41770b | ||
![]() |
717ac6ca0a | ||
![]() |
2c11d5f2a8 | ||
![]() |
8adc6b7e8e | ||
![]() |
7ad8937d7a | ||
![]() |
abab284996 | ||
![]() |
1caa3107b3 | ||
![]() |
5590f7385a | ||
![]() |
037c73c387 | ||
![]() |
ca3354bdea | ||
![]() |
da7419c035 | ||
![]() |
88fbba3e1e | ||
![]() |
2cd5c00923 | ||
![]() |
3d624d204f | ||
![]() |
428c63087a | ||
![]() |
9ab21024e2 | ||
![]() |
a4703b1220 | ||
![]() |
9493da2536 | ||
![]() |
dc78f4c58a | ||
![]() |
bedc9fe665 | ||
![]() |
ff7230c0cf | ||
![]() |
033647c8b1 | ||
![]() |
43c768f4b7 | ||
![]() |
3270aa106b | ||
![]() |
2dd9414bbb | ||
![]() |
30a9bfe38f | ||
![]() |
3370d3f57e | ||
![]() |
3df90e3e33 | ||
![]() |
89e2ee70e2 | ||
![]() |
2111cf8044 | ||
![]() |
811abf5d00 | ||
![]() |
37e14ee1ac | ||
![]() |
68f0973339 | ||
![]() |
9d1d52d51f | ||
![]() |
27e3b34d6d | ||
![]() |
c523e22575 | ||
![]() |
5064ad6cad | ||
![]() |
990a620d52 | ||
![]() |
a9996db35c | ||
![]() |
ddc5b4422d | ||
![]() |
b2a02861bb | ||
![]() |
1df9c3437f | ||
![]() |
9cc6bc26a2 | ||
![]() |
94ce7f5040 | ||
![]() |
5bea24526f | ||
![]() |
57b27023a4 | ||
![]() |
ceb260927b | ||
![]() |
1c362ccb11 | ||
![]() |
2c2eb911e1 | ||
![]() |
d1e0266952 | ||
![]() |
8986c21d73 | ||
![]() |
64532902f9 | ||
![]() |
568a535e3f | ||
![]() |
cd5e7579be | ||
![]() |
b559b99c8f | ||
![]() |
a39a6c4ea7 | ||
![]() |
4b5e071141 | ||
![]() |
2a64c6a402 | ||
![]() |
aea10bf876 | ||
![]() |
2088d407a2 | ||
![]() |
cc14384818 | ||
![]() |
3d29e0d641 | ||
![]() |
ad2b493171 | ||
![]() |
5bbcb43ad3 | ||
![]() |
fe8548853d | ||
![]() |
4a17099cc3 | ||
![]() |
822b763222 | ||
![]() |
28aa1a64d4 | ||
![]() |
e3ab326228 | ||
![]() |
6934703ffb | ||
![]() |
53fb5963eb | ||
![]() |
022aa496a5 | ||
![]() |
d424d0e54e | ||
![]() |
762893d4a6 | ||
![]() |
addf6f2686 | ||
![]() |
ce4b812b52 | ||
![]() |
afaaf04fa9 | ||
![]() |
9f49461183 | ||
![]() |
4081597731 | ||
![]() |
d42e5421dc | ||
![]() |
80a6eab2db | ||
![]() |
916da02326 | ||
![]() |
9199800bbd | ||
![]() |
838a0086cf | ||
![]() |
ab74738609 | ||
![]() |
90c28783a0 | ||
![]() |
35406a8984 | ||
![]() |
6a1b0331a9 | ||
![]() |
258600eede | ||
![]() |
b7794cdac8 | ||
![]() |
a6580d4704 | ||
![]() |
4d0ee4e169 | ||
![]() |
6bc2e660e8 | ||
![]() |
8665ccb661 | ||
![]() |
6610c50414 | ||
![]() |
b0c63a6554 | ||
![]() |
77d2b9a87a | ||
![]() |
f931067bf2 | ||
![]() |
7d66779c06 | ||
![]() |
6cfcf59781 | ||
![]() |
544258f16a | ||
![]() |
b48bdc9e19 | ||
![]() |
cf4f281f54 | ||
![]() |
673faf6044 | ||
![]() |
e86cdbbf23 | ||
![]() |
0c4b56d885 | ||
![]() |
ebdba2c6a7 | ||
![]() |
7d68d4564c | ||
![]() |
06fd0f892e | ||
![]() |
b13629062a | ||
![]() |
a284c3e76d | ||
![]() |
efcdd64893 | ||
![]() |
e2b87ade06 | ||
![]() |
96c1fb7b0b | ||
![]() |
4d5eff11b4 | ||
![]() |
8db12b72ec | ||
![]() |
9900d348d7 | ||
![]() |
85c454bdb0 |
52
.github/workflows/bootstrap.yml
vendored
52
.github/workflows/bootstrap.yml
vendored
@@ -43,7 +43,7 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -61,6 +61,14 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Work around CVE-2022-24765
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
@@ -73,7 +81,7 @@ jobs:
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -90,6 +98,14 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
@@ -127,7 +143,7 @@ jobs:
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -143,7 +159,7 @@ jobs:
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -152,13 +168,13 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Bootstrap clingo
|
||||
@@ -172,10 +188,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Setup repo and non-root user
|
||||
@@ -202,6 +218,14 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
@@ -231,6 +255,14 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Work around CVE-2022-24765
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
@@ -244,7 +276,7 @@ jobs:
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
@@ -277,6 +309,6 @@ jobs:
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap untrust github-actions
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
48
.github/workflows/build-containers.yml
vendored
48
.github/workflows/build-containers.yml
vendored
@@ -13,6 +13,8 @@ on:
|
||||
paths:
|
||||
- '.github/workflows/build-containers.yml'
|
||||
- 'share/spack/docker/*'
|
||||
- 'share/templates/container/*'
|
||||
- 'lib/spack/spack/container/*'
|
||||
# Let's also build & tag Spack containers on releases.
|
||||
release:
|
||||
types: [published]
|
||||
@@ -29,11 +31,17 @@ jobs:
|
||||
# A matrix of Dockerfile paths, associated tags, and which architectures
|
||||
# they support.
|
||||
matrix:
|
||||
dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
|
||||
[centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
|
||||
[ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
|
||||
# Meaning of the various items in the matrix list
|
||||
# 0: Container name (e.g. ubuntu-bionic)
|
||||
# 1: Platforms to build for
|
||||
# 2: Base image (e.g. ubuntu:18.04)
|
||||
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
|
||||
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
|
||||
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
|
||||
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
|
||||
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
@@ -52,14 +60,26 @@ jobs:
|
||||
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
|
||||
echo "versioned=${versioned}" >> $GITHUB_ENV
|
||||
|
||||
- name: Check ${{ matrix.dockerfile[1] }} Exists
|
||||
- name: Generate the Dockerfile
|
||||
env:
|
||||
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
|
||||
run: |
|
||||
printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
|
||||
if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
|
||||
printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
|
||||
.github/workflows/generate_spack_yaml_containerize.sh
|
||||
. share/spack/setup-env.sh
|
||||
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
|
||||
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
|
||||
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
|
||||
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
|
||||
with:
|
||||
name: dockerfiles
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
|
||||
|
||||
@@ -80,12 +100,14 @@ jobs:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[1] }}
|
||||
uses: docker/build-push-action@7f9d37fa544684fb73bfe4835ed7214c255ce02b # @v2
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
|
||||
with:
|
||||
file: share/spack/docker/${{matrix.dockerfile[1]}}
|
||||
platforms: ${{ matrix.dockerfile[2] }}
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
tags: |
|
||||
spack/${{ env.container }}
|
||||
spack/${{ env.versioned }}
|
||||
|
7
.github/workflows/execute_installer.ps1
vendored
Normal file
7
.github/workflows/execute_installer.ps1
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
$ proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
|
||||
$handle = $proc.Handle # cache proc.Handle
|
||||
$proc.WaitForExit();
|
||||
|
||||
if ($proc.ExitCode -ne 0) {
|
||||
Write-Warning "$_ exited with status code $($proc.ExitCode)"
|
||||
}
|
9
.github/workflows/generate_spack_yaml_containerize.sh
vendored
Executable file
9
.github/workflows/generate_spack_yaml_containerize.sh
vendored
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
(echo "spack:" \
|
||||
&& echo " specs: []" \
|
||||
&& echo " container:" \
|
||||
&& echo " format: docker" \
|
||||
&& echo " images:" \
|
||||
&& echo " os: \"${SPACK_YAML_OS}\"" \
|
||||
&& echo " spack:" \
|
||||
&& echo " ref: ${GITHUB_REF}") > spack.yaml
|
6
.github/workflows/macos_python.yml
vendored
6
.github/workflows/macos_python.yml
vendored
@@ -25,7 +25,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
@@ -40,7 +40,7 @@ jobs:
|
||||
timeout-minutes: 700
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
@@ -53,7 +53,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
|
11
.github/workflows/setup_git.ps1
vendored
Normal file
11
.github/workflows/setup_git.ps1
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
# (c) 2021 Lawrence Livermore National Laboratory
|
||||
|
||||
Set-Location spack
|
||||
|
||||
git config --global user.email "spack@example.com"
|
||||
git config --global user.name "Test User"
|
||||
|
||||
if ($(git branch --show-current) -ne "develop")
|
||||
{
|
||||
git branch develop origin/develop
|
||||
}
|
4
.github/workflows/system_shortcut_check.ps1
vendored
Normal file
4
.github/workflows/system_shortcut_check.ps1
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
param ($systemFolder, $shortcut)
|
||||
|
||||
$start = [System.Environment]::GetFolderPath("$systemFolder")
|
||||
Invoke-Item "$start\Programs\Spack\$shortcut"
|
36
.github/workflows/unit_tests.yaml
vendored
36
.github/workflows/unit_tests.yaml
vendored
@@ -16,9 +16,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
@@ -34,9 +34,9 @@ jobs:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools types-six
|
||||
@@ -96,7 +96,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
concretizer: ['clingo']
|
||||
include:
|
||||
- python-version: 2.7
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -162,7 +162,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
@@ -174,9 +174,9 @@ jobs:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
@@ -200,7 +200,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
@@ -240,9 +240,9 @@ jobs:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
@@ -274,7 +274,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
@@ -289,7 +289,7 @@ jobs:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -320,7 +320,7 @@ jobs:
|
||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||
fi
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
@@ -332,9 +332,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
python-version: '3.10'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
|
||||
@@ -350,7 +350,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
188
.github/workflows/windows_python.yml
vendored
Normal file
188
.github/workflows/windows_python.yml
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
name: windows tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
defaults:
|
||||
run:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install --upgrade vermin
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/lib/spack/spack/ spack/lib/spack/llnl/ spack/bin/
|
||||
- name: vermin (Repositories)
|
||||
run: vermin --backport argparse --backport typing -t='2.7-' -t='3.5-' -v spack/var/spack/repos
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Run style tests
|
||||
run: |
|
||||
spack style
|
||||
- name: Verify license headers
|
||||
run: |
|
||||
python spack\bin\spack license verify
|
||||
unittest:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
|
||||
unittest-cmd:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Create local develop
|
||||
run: |
|
||||
.\spack\.github\workflows\setup_git.ps1
|
||||
- name: Command Unit Test
|
||||
run: |
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack unit-test lib/spack/spack/test/cmd --verbose
|
||||
buildtest:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Build Test
|
||||
run: |
|
||||
spack compiler find
|
||||
echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
|
||||
spack external find cmake
|
||||
spack external find ninja
|
||||
spack install abseil-cpp
|
||||
generate-installer-test:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- name: Disable Windows Symlinks
|
||||
run: |
|
||||
git config --global core.symlinks false
|
||||
shell:
|
||||
powershell
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Add Light and Candle to Path
|
||||
run: |
|
||||
$env:WIX >> $GITHUB_PATH
|
||||
- name: Run Installer
|
||||
run: |
|
||||
.\spack\share\spack\qa\setup_spack.ps1
|
||||
spack make-installer -s spack -g SILENT pkg
|
||||
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
|
||||
env:
|
||||
ProgressPreference: SilentlyContinue
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer Bundle
|
||||
path: ${{ env.installer_root }}\pkg\Spack.exe
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.installer_root}}\pkg\Spack.msi
|
||||
execute-installer:
|
||||
needs: generate-installer-test
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
shell: pwsh
|
||||
steps:
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
|
||||
- name: Setup installer directory
|
||||
run: |
|
||||
mkdir -p spack_installer
|
||||
echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer Bundle
|
||||
path: ${{ env.spack_installer }}
|
||||
- name: Execute Bundled Installer
|
||||
run: |
|
||||
$proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
|
||||
$handle = $proc.Handle # cache proc.Handle
|
||||
$proc.WaitForExit();
|
||||
$LASTEXITCODE
|
||||
env:
|
||||
ProgressPreference: SilentlyContinue
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.spack_installer }}
|
||||
- name: Execute MSI
|
||||
run: |
|
||||
$proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
|
||||
$handle = $proc.Handle # cache proc.Handle
|
||||
$proc.WaitForExit();
|
||||
$LASTEXITCODE
|
16
CHANGELOG.md
16
CHANGELOG.md
@@ -1,3 +1,19 @@
|
||||
# v0.17.2 (2022-04-13)
|
||||
|
||||
### Spack bugfixes
|
||||
* Fix --reuse with upstreams set in an environment (#29680)
|
||||
* config add: fix parsing of validator error to infer type from oneOf (#29475)
|
||||
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
|
||||
* Use Spec.constrain to construct spec lists for stacks (#28783)
|
||||
* Fix bug occurring when searching for inherited patches in packages (#29574)
|
||||
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
|
||||
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
|
||||
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
|
||||
* Fix handling of Intel compiler environment (#29439)
|
||||
* Fix a few edge cases when reindexing the DB (#28764)
|
||||
* Remove "Known issues" from documentation (#29664)
|
||||
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
|
||||
|
||||
# v0.17.1 (2021-12-23)
|
||||
|
||||
### Spack Bugfixes
|
||||
|
20
bin/haspywin.py
Normal file
20
bin/haspywin.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def getpywin():
|
||||
try:
|
||||
import win32con # noqa
|
||||
except ImportError:
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
|
||||
subprocess.check_call(
|
||||
[sys.executable, "-m", "pip", "-q", "install", "pywin32"])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
getpywin()
|
223
bin/spack.bat
Normal file
223
bin/spack.bat
Normal file
@@ -0,0 +1,223 @@
|
||||
:: Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
::
|
||||
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
::#######################################################################
|
||||
::
|
||||
:: This file is part of Spack and sets up the spack environment for batch,
|
||||
:: This includes environment modules and lmod support,
|
||||
:: and it also puts spack in your path. The script also checks that at least
|
||||
:: module support exists, and provides suggestions if it doesn't. Source
|
||||
:: it like this:
|
||||
::
|
||||
:: . /path/to/spack/install/spack_cmd.bat
|
||||
::
|
||||
@echo off
|
||||
|
||||
set spack=%SPACK_ROOT%\bin\spack
|
||||
|
||||
::#######################################################################
|
||||
:: This is a wrapper around the spack command that forwards calls to
|
||||
:: 'spack load' and 'spack unload' to shell functions. This in turn
|
||||
:: allows them to be used to invoke environment modules functions.
|
||||
::
|
||||
:: 'spack load' is smarter than just 'load' because it converts its
|
||||
:: arguments into a unique Spack spec that is then passed to module
|
||||
:: commands. This allows the user to use packages without knowing all
|
||||
:: their installation details.
|
||||
::
|
||||
:: e.g., rather than requiring a full spec for libelf, the user can type:
|
||||
::
|
||||
:: spack load libelf
|
||||
::
|
||||
:: This will first find the available libelf module file and use a
|
||||
:: matching one. If there are two versions of libelf, the user would
|
||||
:: need to be more specific, e.g.:
|
||||
::
|
||||
:: spack load libelf@0.8.13
|
||||
::
|
||||
:: This is very similar to how regular spack commands work and it
|
||||
:: avoids the need to come up with a user-friendly naming scheme for
|
||||
:: spack module files.
|
||||
::#######################################################################
|
||||
|
||||
:_sp_shell_wrapper
|
||||
set "_sp_flags="
|
||||
set "_sp_args="
|
||||
set "_sp_subcommand="
|
||||
setlocal enabledelayedexpansion
|
||||
:: commands have the form '[flags] [subcommand] [args]'
|
||||
:: flags will always start with '-', e.g. --help or -V
|
||||
:: subcommands will never start with '-'
|
||||
:: everything after the subcommand is an arg
|
||||
for %%x in (%*) do (
|
||||
set t="%%~x"
|
||||
if "!t:~0,1!" == "-" (
|
||||
if defined _sp_subcommand (
|
||||
:: We already have a subcommand, processing args now
|
||||
set "_sp_args=!_sp_args! !t!"
|
||||
) else (
|
||||
set "_sp_flags=!_sp_flags! !t!"
|
||||
shift
|
||||
)
|
||||
) else if not defined _sp_subcommand (
|
||||
set "_sp_subcommand=!t!"
|
||||
shift
|
||||
) else (
|
||||
set "_sp_args=!_sp_args! !t!"
|
||||
shift
|
||||
)
|
||||
)
|
||||
|
||||
:: --help, -h and -V flags don't require further output parsing.
|
||||
:: If we encounter, execute and exit
|
||||
if defined _sp_flags (
|
||||
if NOT "%_sp_flags%"=="%_sp_flags:-h=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
) else if NOT "%_sp_flags%"=="%_sp_flags:--help=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
) else if NOT "%_sp_flags%"=="%_sp_flags:-V=%" (
|
||||
python "%spack%" %_sp_flags%
|
||||
exit /B 0
|
||||
)
|
||||
)
|
||||
:: pass parsed variables outside of local scope. Need to do
|
||||
:: this because delayedexpansion can only be set by setlocal
|
||||
echo %_sp_flags%>flags
|
||||
echo %_sp_args%>args
|
||||
echo %_sp_subcommand%>subcmd
|
||||
endlocal
|
||||
set /p _sp_subcommand=<subcmd
|
||||
set /p _sp_flags=<flags
|
||||
set /p _sp_args=<args
|
||||
set str_subcommand=%_sp_subcommand:"='%
|
||||
set str_flags=%_sp_flags:"='%
|
||||
set str_args=%_sp_args:"='%
|
||||
if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
|
||||
if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
|
||||
if "%str_args%"=="ECHO is off." (set "_sp_args=")
|
||||
del subcmd
|
||||
del flags
|
||||
del args
|
||||
|
||||
:: Filter out some commands. For any others, just run the command.
|
||||
if "%_sp_subcommand%" == "cd" (
|
||||
goto :case_cd
|
||||
) else if "%_sp_subcommand%" == "env" (
|
||||
goto :case_env
|
||||
) else if "%_sp_subcommand%" == "load" (
|
||||
goto :case_load
|
||||
) else if "%_sp_subcommand%" == "unload" (
|
||||
goto :case_load
|
||||
) else (
|
||||
goto :default_case
|
||||
)
|
||||
|
||||
::#######################################################################
|
||||
|
||||
:case_cd
|
||||
:: Check for --help or -h
|
||||
:: TODO: This is not exactly the same as setup-env.
|
||||
:: In setup-env, '--help' or '-h' must follow the cd
|
||||
:: Here, they may be anywhere in the args
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
python "%spack%" cd -h
|
||||
goto :end_switch
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
|
||||
python "%spack%" cd -h
|
||||
goto :end_switch
|
||||
)
|
||||
)
|
||||
|
||||
for /F "tokens=* USEBACKQ" %%F in (
|
||||
`python "%spack%" location %_sp_args%`) do (
|
||||
set "LOC=%%F"
|
||||
)
|
||||
for %%Z in ("%LOC%") do if EXIST %%~sZ\NUL (cd /d "%LOC%")
|
||||
goto :end_switch
|
||||
|
||||
:case_env
|
||||
:: If no args or args contain --bat or -h/--help: just execute.
|
||||
if NOT defined _sp_args (
|
||||
goto :default_case
|
||||
)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
|
||||
) do %%I
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
|
||||
) do %%I
|
||||
) else (
|
||||
goto :default_case
|
||||
)
|
||||
goto :end_switch
|
||||
|
||||
:case_load
|
||||
:: If args contain --sh, --csh, or -h/--help: just execute.
|
||||
if defined _sp_args (
|
||||
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
|
||||
goto :default_case
|
||||
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
|
||||
goto :default_case
|
||||
)
|
||||
)
|
||||
|
||||
for /f "tokens=* USEBACKQ" %%I in (
|
||||
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
|
||||
)
|
||||
goto :end_switch
|
||||
|
||||
:case_unload
|
||||
goto :case_load
|
||||
|
||||
:default_case
|
||||
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
|
||||
goto :end_switch
|
||||
|
||||
:end_switch
|
||||
exit /B %ERRORLEVEL%
|
||||
|
||||
|
||||
::########################################################################
|
||||
:: Prepends directories to path, if they exist.
|
||||
:: pathadd /path/to/dir # add to PATH
|
||||
:: or pathadd OTHERPATH /path/to/dir # add to OTHERPATH
|
||||
::########################################################################
|
||||
|
||||
:_spack_pathadd
|
||||
set "_pa_varname=PATH"
|
||||
set "_pa_new_path=%~1"
|
||||
if NOT "%~2" == "" (
|
||||
set "_pa_varname=%~1"
|
||||
set "_pa_new_path=%~2"
|
||||
)
|
||||
set "_pa_oldvalue=%_pa_varname%"
|
||||
for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
|
||||
if defined %_pa_oldvalue% (
|
||||
set "_pa_varname=%_pa_new_path%:%_pa_oldvalue%"
|
||||
) else (
|
||||
set "_pa_varname=%_pa_new_path%"
|
||||
)
|
||||
)
|
||||
exit /b 0
|
||||
|
||||
:: set module system roots
|
||||
:_sp_multi_pathadd
|
||||
for %%I in (%~2) do (
|
||||
for %%Z in (%_sp_compatible_sys_types%) do (
|
||||
:pathadd "%~1" "%%I\%%Z"
|
||||
)
|
||||
)
|
||||
exit /B %ERRORLEVEL%
|
72
bin/spack_cmd.bat
Normal file
72
bin/spack_cmd.bat
Normal file
@@ -0,0 +1,72 @@
|
||||
@ECHO OFF
|
||||
setlocal EnableDelayedExpansion
|
||||
:: (c) 2021 Lawrence Livermore National Laboratory
|
||||
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
|
||||
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
|
||||
::
|
||||
:: source_dir/spack/bin/spack_cmd.bat
|
||||
::
|
||||
pushd %~dp0..
|
||||
set SPACK_ROOT=%CD%
|
||||
pushd %CD%\..
|
||||
set spackinstdir=%CD%
|
||||
popd
|
||||
|
||||
|
||||
:: Check if Python is on the PATH
|
||||
if not defined python_pf_ver (
|
||||
(for /f "delims=" %%F in ('where python.exe') do (
|
||||
set "python_pf_ver=%%F"
|
||||
goto :found_python
|
||||
) ) 2> NUL
|
||||
)
|
||||
:found_python
|
||||
if not defined python_pf_ver (
|
||||
:: If not, look for Python from the Spack installer
|
||||
:get_builtin
|
||||
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
|
||||
set "python_ver=%%g")) 2> NUL
|
||||
|
||||
if not defined python_ver (
|
||||
echo Python was not found on your system.
|
||||
echo Please install Python or add Python to your PATH.
|
||||
) else (
|
||||
set "py_path=!spackinstdir!\!python_ver!"
|
||||
set "py_exe=!py_path!\python.exe"
|
||||
)
|
||||
goto :exitpoint
|
||||
) else (
|
||||
:: Python is already on the path
|
||||
set "py_exe=!python_pf_ver!"
|
||||
(for /F "tokens=* USEBACKQ" %%F in (
|
||||
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
|
||||
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
|
||||
goto :exitpoint
|
||||
)
|
||||
:exitpoint
|
||||
|
||||
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
|
||||
if defined py_path (
|
||||
set "PATH=%py_path%;%PATH%"
|
||||
)
|
||||
|
||||
if defined py_exe (
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
|
||||
)
|
||||
|
||||
set "EDITOR=notepad"
|
||||
|
||||
DOSKEY spacktivate=spack env activate $*
|
||||
|
||||
@echo **********************************************************************
|
||||
@echo ** Spack Package Manager
|
||||
@echo **********************************************************************
|
||||
|
||||
IF "%1"=="" GOTO CONTINUE
|
||||
set
|
||||
GOTO:EOF
|
||||
|
||||
:continue
|
||||
set PROMPT=[spack] %PROMPT%
|
||||
%comspec% /k
|
10
bin/spack_pwsh.ps1
Normal file
10
bin/spack_pwsh.ps1
Normal file
@@ -0,0 +1,10 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
$Env:SPACK_PS1_PATH="$PSScriptRoot\..\share\spack\setup-env.ps1"
|
||||
& (Get-Process -Id $pid).Path -NoExit {
|
||||
. $Env:SPACK_PS1_PATH ;
|
||||
Push-Location $ENV:SPACK_ROOT
|
||||
}
|
@@ -9,15 +9,24 @@ bootstrap:
|
||||
# may not be able to bootstrap all of the software that Spack needs,
|
||||
# depending on its type.
|
||||
sources:
|
||||
- name: 'github-actions'
|
||||
- name: 'github-actions-v0.2'
|
||||
type: buildcache
|
||||
description: |
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
|
||||
homepage: https://github.com/spack/spack-bootstrap-mirrors
|
||||
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
|
||||
- name: 'github-actions-v0.1'
|
||||
type: buildcache
|
||||
description: |
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
homepage: https://github.com/spack/spack-bootstrap-mirrors
|
||||
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
|
||||
# This method is just Spack bootstrapping the software it needs from sources.
|
||||
# It has been added here so that users can selectively disable bootstrapping
|
||||
# from sources by "untrusting" it.
|
||||
@@ -28,5 +37,5 @@ bootstrap:
|
||||
trusted:
|
||||
# By default we trust bootstrapping from sources and from binaries
|
||||
# produced on Github via the workflow
|
||||
github-actions: true
|
||||
github-actions-v0.2: true
|
||||
spack-install: true
|
||||
|
@@ -35,13 +35,10 @@ modules:
|
||||
|
||||
# These are configurations for the module set named "default"
|
||||
default:
|
||||
# These values are defaulted in the code. They are not defaulted here so
|
||||
# that we can enable backwards compatibility with the old syntax more
|
||||
# easily (old value is in the config yaml, config:module_roots)
|
||||
# Where to install modules
|
||||
# roots:
|
||||
# tcl: $spack/share/spack/modules
|
||||
# lmod: $spack/share/spack/lmod
|
||||
roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
# What type of modules to use
|
||||
enable:
|
||||
- tcl
|
||||
|
5
etc/spack/defaults/windows/config.yaml
Normal file
5
etc/spack/defaults/windows/config.yaml
Normal file
@@ -0,0 +1,5 @@
|
||||
config:
|
||||
locks: false
|
||||
concretizer: original
|
||||
build_stage::
|
||||
- '$spack/.staging'
|
@@ -1723,8 +1723,8 @@ Activating Extensions in a View
|
||||
|
||||
Another way to use extensions is to create a view, which merges the
|
||||
python installation along with the extensions into a single prefix.
|
||||
See :ref:`filesystem-views` for a more in-depth description of views and
|
||||
:ref:`cmd-spack-view` for usage of the ``spack view`` command.
|
||||
See :ref:`configuring_environment_views` for a more in-depth description
|
||||
of views.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Activating Extensions Globally
|
||||
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _build-settings:
|
||||
|
||||
===================
|
||||
Build Customization
|
||||
===================
|
||||
================================
|
||||
Package Settings (packages.yaml)
|
||||
================================
|
||||
|
||||
Spack allows you to customize how your software is built through the
|
||||
``packages.yaml`` file. Using it, you can make Spack prefer particular
|
||||
|
@@ -51,6 +51,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
build_systems/perlpackage
|
||||
build_systems/pythonpackage
|
||||
build_systems/rpackage
|
||||
build_systems/racketpackage
|
||||
build_systems/rubypackage
|
||||
|
||||
.. toctree::
|
||||
|
@@ -433,7 +433,7 @@ For example:
|
||||
.. code-block:: python
|
||||
|
||||
variant('profiler', when='@2.0:')
|
||||
config_args += self.with_or_without('profiler)
|
||||
config_args += self.with_or_without('profiler')
|
||||
|
||||
will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
|
||||
below ``2.0``.
|
||||
|
@@ -159,6 +159,85 @@ and CMake simply ignores the empty command line argument. For example the follow
|
||||
will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
|
||||
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CMake arguments provided by Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The following default arguments are controlled by Spack:
|
||||
|
||||
|
||||
``CMAKE_INSTALL_PREFIX``
|
||||
------------------------
|
||||
|
||||
Is set to the the package's install directory.
|
||||
|
||||
|
||||
``CMAKE_PREFIX_PATH``
|
||||
---------------------
|
||||
|
||||
CMake finds dependencies through calls to ``find_package()``, ``find_program()``,
|
||||
``find_library()``, ``find_file()``, and ``find_path()``, which use a list of search
|
||||
paths from ``CMAKE_PREFIX_PATH``. Spack sets this variable to a list of prefixes of the
|
||||
spec's transitive dependencies.
|
||||
|
||||
For troubleshooting cases where CMake fails to find a dependency, add the
|
||||
``--debug-find`` flag to ``cmake_args``.
|
||||
|
||||
``CMAKE_BUILD_TYPE``
|
||||
--------------------
|
||||
|
||||
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
|
||||
dictate which level of optimization to use. In order to ensure
|
||||
uniformity across packages, the ``CMakePackage`` base class adds
|
||||
a variant to control this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
description='CMake build type',
|
||||
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
|
||||
|
||||
However, not every CMake package accepts all four of these options.
|
||||
Grep the ``CMakeLists.txt`` file to see if the default values are
|
||||
missing or replaced. For example, the
|
||||
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
|
||||
package overrides the default variant with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='DebugRelease',
|
||||
description='The build type to build',
|
||||
values=('Debug', 'Release', 'DebugRelease'))
|
||||
|
||||
For more information on ``CMAKE_BUILD_TYPE``, see:
|
||||
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
|
||||
|
||||
``CMAKE_INSTALL_RPATH`` and ``CMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``
|
||||
--------------------------------------------------------------------
|
||||
|
||||
CMake uses different RPATHs during the build and after installation, so that executables
|
||||
can locate the libraries they're linked to during the build, and installed executables
|
||||
do not have RPATHs to build directories. In Spack, we have to make sure that RPATHs are
|
||||
set properly after installation.
|
||||
|
||||
Spack sets ``CMAKE_INSTALL_RPATH`` to a list of ``<prefix>/lib`` or ``<prefix>/lib64``
|
||||
directories of the spec's link-type dependencies. Apart from that, it sets
|
||||
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, which should add RPATHs for directories of
|
||||
linked libraries not in the directories covered by ``CMAKE_INSTALL_RPATH``.
|
||||
|
||||
Usually it's enough to set only ``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON``, but the
|
||||
reason to provide both options is that packages may dynamically open shared libraries,
|
||||
which CMake cannot detect. In those cases, the RPATHs from ``CMAKE_INSTALL_RPATH`` are
|
||||
used as search paths.
|
||||
|
||||
.. note::
|
||||
|
||||
Some packages provide stub libraries, which contain an interface for linking without
|
||||
an implementation. When using such libraries, it's best to override the option
|
||||
``-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=OFF`` in ``cmake_args``, so that stub libraries
|
||||
are not used at runtime.
|
||||
|
||||
|
||||
^^^^^^^^^^
|
||||
Generators
|
||||
@@ -196,36 +275,6 @@ generators, but it should be simple to add support for alternative
|
||||
generators. For more information on CMake generators, see:
|
||||
https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
CMAKE_BUILD_TYPE
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Every CMake-based package accepts a ``-DCMAKE_BUILD_TYPE`` flag to
|
||||
dictate which level of optimization to use. In order to ensure
|
||||
uniformity across packages, the ``CMakePackage`` base class adds
|
||||
a variant to control this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
description='CMake build type',
|
||||
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
|
||||
|
||||
However, not every CMake package accepts all four of these options.
|
||||
Grep the ``CMakeLists.txt`` file to see if the default values are
|
||||
missing or replaced. For example, the
|
||||
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
|
||||
package overrides the default variant with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('build_type', default='DebugRelease',
|
||||
description='The build type to build',
|
||||
values=('Debug', 'Release', 'DebugRelease'))
|
||||
|
||||
For more information on ``CMAKE_BUILD_TYPE``, see:
|
||||
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
CMakeLists.txt in a sub-directory
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@@ -645,8 +645,7 @@ are not yet in Spack, and Spack contains many Python packages that are
|
||||
not yet in Anaconda. The main advantage of Spack over Anaconda is its
|
||||
ability to choose a specific compiler and BLAS/LAPACK or MPI library.
|
||||
Spack also has better platform support for supercomputers, and can build
|
||||
optimized binaries for your specific microarchitecture. On the other hand,
|
||||
Anaconda offers Windows support.
|
||||
optimized binaries for your specific microarchitecture.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
External documentation
|
||||
|
46
lib/spack/docs/build_systems/racketpackage.rst
Normal file
46
lib/spack/docs/build_systems/racketpackage.rst
Normal file
@@ -0,0 +1,46 @@
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _racketpackage:
|
||||
|
||||
-------------
|
||||
RacketPackage
|
||||
-------------
|
||||
|
||||
Much like Python, Racket packages and modules have their own special build system.
|
||||
To learn more about the specifics of Racket package system, please refer to the
|
||||
`Racket Docs <https://docs.racket-lang.org/pkg/cmdline.html>`_.
|
||||
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
The ``RacketPackage`` base class provides an ``install`` phase that
|
||||
can be overridden, corresponding to the use of:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ raco pkg install
|
||||
|
||||
^^^^^^^
|
||||
Caveats
|
||||
^^^^^^^
|
||||
|
||||
In principle, ``raco`` supports a second, ``setup`` phase; however, we have not
|
||||
implemented this separately, as in normal circumstances, ``install`` also handles
|
||||
running ``setup`` automatically.
|
||||
|
||||
Unlike Python, Racket currently on supports two installation scopes for packages, user
|
||||
or system, and keeps a registry of installed packages at each scope in its configuration files.
|
||||
This means we can't simply compose a "``RACKET_PATH``" environment variable listing all of the
|
||||
places packages are installed, and update this at will.
|
||||
|
||||
Unfortunately this means that all currently installed packages which extend Racket via ``raco pkg install``
|
||||
are accessible whenever Racket is accessible.
|
||||
|
||||
Additionally, because Spack does not implement uninstall hooks, uninstalling a Spack ``rkt-`` package
|
||||
will have no effect on the ``raco`` installed packages visible to your Racket installation.
|
||||
Instead, you must manually run ``raco pkg remove`` to keep the two package managers in a mutually
|
||||
consistent state.
|
@@ -180,6 +180,7 @@ def setup(sphinx):
|
||||
('py:class', '_frozen_importlib_external.SourceFileLoader'),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
('py:class', 'spack.provider_index._IndexBase'),
|
||||
('py:class', 'spack.repo._PrependFileLoader'),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _config-yaml:
|
||||
|
||||
==============
|
||||
Basic Settings
|
||||
==============
|
||||
============================
|
||||
Spack Settings (config.yaml)
|
||||
============================
|
||||
|
||||
Spack's basic configuration options are set in ``config.yaml``. You can
|
||||
see the default settings by looking at
|
||||
@@ -72,21 +72,6 @@ used to configure module names.
|
||||
packages have been installed will prevent Spack from being
|
||||
able to find the old installation directories.
|
||||
|
||||
--------------------
|
||||
``module_roots``
|
||||
--------------------
|
||||
|
||||
Controls where Spack installs generated module files. You can customize
|
||||
the location for each type of module. e.g.:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
module_roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
|
||||
See :ref:`modules` for details.
|
||||
|
||||
--------------------
|
||||
``build_stage``
|
||||
--------------------
|
||||
|
@@ -37,8 +37,6 @@ Here is an example ``config.yaml`` file:
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -253,8 +251,6 @@ your configurations look like this:
|
||||
|
||||
config:
|
||||
install_tree: $spack/opt/spack
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -278,8 +274,6 @@ command:
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -345,13 +339,11 @@ higher-precedence scope is *prepended* to the defaults. ``spack config
|
||||
get config`` shows the result:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 7-10
|
||||
:emphasize-lines: 5-8
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user/spack
|
||||
- ~/mystage
|
||||
@@ -375,13 +367,11 @@ user config looked like this:
|
||||
The merged configuration would look like this:
|
||||
|
||||
.. code-block:: console
|
||||
:emphasize-lines: 7-8
|
||||
:emphasize-lines: 5-6
|
||||
|
||||
$ spack config get config
|
||||
config:
|
||||
install_tree: /some/other/directory
|
||||
module_roots:
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- /lustre-scratch/$user/spack
|
||||
- ~/mystage
|
||||
@@ -502,9 +492,6 @@ account all scopes. For example, to see the fully merged
|
||||
template_dirs:
|
||||
- $spack/templates
|
||||
directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
|
||||
module_roots:
|
||||
tcl: $spack/share/spack/modules
|
||||
lmod: $spack/share/spack/lmod
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
@@ -552,9 +539,6 @@ down the problem:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:23 template_dirs:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:24 - $spack/templates
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:28 directory_layout: {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:32 module_roots:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:33 tcl: $spack/share/spack/modules
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:34 lmod: $spack/share/spack/lmod
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:49 build_stage:
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:50 - $tempdir/$user/spack-stage
|
||||
/home/myuser/spack/etc/spack/defaults/config.yaml:51 - ~/.spack/stage
|
||||
|
@@ -1057,39 +1057,39 @@ Release branches
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
There are currently two types of Spack releases: :ref:`major releases
|
||||
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
|
||||
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
|
||||
diagram of how Spack release branches work::
|
||||
|
||||
o branch: develop (latest version)
|
||||
o branch: develop (latest version, v0.19.0.dev0)
|
||||
|
|
||||
o merge v0.14.1 into develop
|
||||
|\
|
||||
| o branch: releases/v0.14, tag: v0.14.1
|
||||
o | merge v0.14.0 into develop
|
||||
|\|
|
||||
| o tag: v0.14.0
|
||||
o
|
||||
| o branch: releases/v0.18, tag: v0.18.1
|
||||
o |
|
||||
| o tag: v0.18.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o merge v0.13.2 into develop
|
||||
|\
|
||||
| o branch: releases/v0.13, tag: v0.13.2
|
||||
o | merge v0.13.1 into develop
|
||||
|\|
|
||||
| o tag: v0.13.1
|
||||
o | merge v0.13.0 into develop
|
||||
|\|
|
||||
| o tag: v0.13.0
|
||||
o
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.17, tag: v0.17.2
|
||||
o |
|
||||
| o tag: v0.17.1
|
||||
o |
|
||||
| o tag: v0.17.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o
|
||||
|
||||
The ``develop`` branch has the latest contributions, and nearly all pull
|
||||
requests target ``develop``.
|
||||
requests target ``develop``. The ``develop`` branch will report that its
|
||||
version is that of the next **major** release with a ``.dev0`` suffix.
|
||||
|
||||
Each Spack release series also has a corresponding branch, e.g.
|
||||
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
|
||||
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
|
||||
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
|
||||
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
|
||||
tagged version on a release branch. Minor releases are back-ported from
|
||||
develop onto release branches. This is typically done by cherry-picking
|
||||
bugfix commits off of ``develop``.
|
||||
@@ -1100,12 +1100,20 @@ packages. They should generally only contain fixes to the Spack core.
|
||||
However, sometimes priorities are such that new functionality needs to
|
||||
be added to a minor release.
|
||||
|
||||
Both major and minor releases are tagged. After each release, we merge
|
||||
the release branch back into ``develop`` so that the version bump and any
|
||||
other release-specific changes are visible in the mainline. As a
|
||||
convenience, we also tag the latest release as ``releases/latest``,
|
||||
so that users can easily check it out to get the latest
|
||||
stable version. See :ref:`merging-releases` for more details.
|
||||
Both major and minor releases are tagged. As a convenience, we also tag
|
||||
the latest release as ``releases/latest``, so that users can easily check
|
||||
it out to get the latest stable version. See :ref:`updating-latest-release`
|
||||
for more details.
|
||||
|
||||
.. note::
|
||||
|
||||
Older spack releases were merged **back** into develop so that we could
|
||||
do fancy things with tags, but since tarballs and many git checkouts do
|
||||
not have tags, this proved overly complex and confusing.
|
||||
|
||||
We have since converted to using `PEP 440 <https://peps.python.org/pep-0440/>`_
|
||||
compliant versions. `See here <https://github.com/spack/spack/pull/25267>`_ for
|
||||
details.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Scheduling work for releases
|
||||
@@ -1163,10 +1171,11 @@ completed, the steps to make the major release are:
|
||||
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
|
||||
branch if you are preparing the ``X.Y.0`` release.
|
||||
|
||||
#. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||
#. Remove the ``dev0`` development release segment from the version tuple in
|
||||
``lib/spack/spack/__init__.py``.
|
||||
|
||||
See `this example from 0.13.0
|
||||
<https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_
|
||||
The version number itself should already be correct and should not be
|
||||
modified.
|
||||
|
||||
#. Update ``CHANGELOG.md`` with major highlights in bullet form.
|
||||
|
||||
@@ -1188,9 +1197,16 @@ completed, the steps to make the major release are:
|
||||
is outdated submit pull requests to ``develop`` as normal
|
||||
and keep rebasing the release branch on ``develop``.
|
||||
|
||||
#. Bump the major version in the ``develop`` branch.
|
||||
|
||||
Create a pull request targeting the ``develop`` branch, bumping the major
|
||||
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
|
||||
For instance when you have just released ``v0.15.0``, set the version
|
||||
to ``(0, 16, 0, 'dev0')`` on ``develop``.
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`merging-releases`.
|
||||
#. Follow the steps in :ref:`updating-latest-release`.
|
||||
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
@@ -1266,9 +1282,6 @@ completed, the steps to make the point release are:
|
||||
|
||||
#. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||
|
||||
See `this example from 0.14.1
|
||||
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
|
||||
|
||||
#. Update ``CHANGELOG.md`` with a list of the changes.
|
||||
|
||||
This is typically a summary of the commits you cherry-picked onto the
|
||||
@@ -1290,7 +1303,7 @@ completed, the steps to make the point release are:
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
#. Follow the steps in :ref:`merging-releases`.
|
||||
#. Follow the steps in :ref:`updating-latest-release`.
|
||||
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
@@ -1351,11 +1364,11 @@ Publishing a release on GitHub
|
||||
selectable in the versions menu.
|
||||
|
||||
|
||||
.. _merging-releases:
|
||||
.. _updating-latest-release:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating `releases/latest` and `develop`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating `releases/latest`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If the new release is the **highest** Spack release yet, you should
|
||||
also tag it as ``releases/latest``. For example, suppose the highest
|
||||
@@ -1379,40 +1392,6 @@ To tag ``releases/latest``, do this:
|
||||
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
|
||||
``releases/latest`` tag with the new one.
|
||||
|
||||
We also merge each release that we tag as ``releases/latest`` into ``develop``.
|
||||
Make sure to do this with a merge commit:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout develop
|
||||
$ git merge --no-ff -s ours vX.Y.Z # vX.Y.Z is the new release's tag
|
||||
$ git push
|
||||
|
||||
We merge back to ``develop`` because it:
|
||||
|
||||
* updates the version and ``CHANGELOG.md`` on ``develop``; and
|
||||
* ensures that your release tag is reachable from the head of
|
||||
``develop``.
|
||||
|
||||
We *must* use a real merge commit (via the ``--no-ff`` option) to
|
||||
ensure that the release tag is reachable from the tip of ``develop``.
|
||||
This is necessary for ``spack -V`` to work properly -- it uses ``git
|
||||
describe --tags`` to find the last reachable tag in the repository and
|
||||
reports how far we are from it. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -V
|
||||
0.14.2-1486-b80d5e74e5
|
||||
|
||||
This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
|
||||
ahead of the ``0.14.2`` release.
|
||||
|
||||
We put this step last in the process because it's best to do it only once
|
||||
the release is complete and tagged. If you do it before you've tagged the
|
||||
release and later decide you want to tag some later commit, you'll need
|
||||
to merge again.
|
||||
|
||||
|
||||
.. _announcing-releases:
|
||||
|
||||
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _environments:
|
||||
|
||||
============
|
||||
Environments
|
||||
============
|
||||
=========================
|
||||
Environments (spack.yaml)
|
||||
=========================
|
||||
|
||||
An environment is used to group together a set of specs for the
|
||||
purpose of building, rebuilding and deploying in a coherent fashion.
|
||||
@@ -384,18 +384,11 @@ Sourcing that file in Bash will make the environment available to the
|
||||
user; and can be included in ``.bashrc`` files, etc. The ``loads``
|
||||
file may also be copied out of the environment, renamed, etc.
|
||||
|
||||
----------
|
||||
spack.yaml
|
||||
----------
|
||||
|
||||
Spack environments can be customized at finer granularity by editing
|
||||
the ``spack.yaml`` manifest file directly.
|
||||
|
||||
.. _environment-configuration:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
------------------------
|
||||
Configuring Environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
------------------------
|
||||
|
||||
A variety of Spack behaviors are changed through Spack configuration
|
||||
files, covered in more detail in the :ref:`configuration`
|
||||
@@ -417,9 +410,9 @@ environment can be specified by ``env:NAME`` (to affect environment
|
||||
``foo``, set ``--scope env:foo``). These commands will automatically
|
||||
manipulate configuration inline in the ``spack.yaml`` file.
|
||||
|
||||
"""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Inline configurations
|
||||
"""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Inline Environment-scope configuration is done using the same yaml
|
||||
format as standard Spack configuration scopes, covered in the
|
||||
@@ -440,9 +433,9 @@ a ``packages.yaml`` file) could contain:
|
||||
This configuration sets the default compiler for all packages to
|
||||
``intel``.
|
||||
|
||||
"""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Included configurations
|
||||
"""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack environments allow an ``include`` heading in their yaml
|
||||
schema. This heading pulls in external configuration files and applies
|
||||
@@ -462,9 +455,9 @@ to make small changes to an individual Environment. Included configs
|
||||
listed earlier will have higher precedence, as the included configs are
|
||||
applied in reverse order.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-------------------------------
|
||||
Manually Editing the Specs List
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-------------------------------
|
||||
|
||||
The list of abstract/root specs in the Environment is maintained in
|
||||
the ``spack.yaml`` manifest under the heading ``specs``.
|
||||
@@ -482,9 +475,9 @@ Appending to this list in the yaml is identical to using the ``spack
|
||||
add`` command from the command line. However, there is more power
|
||||
available from the yaml file.
|
||||
|
||||
"""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Spec concretization
|
||||
"""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Specs can be concretized separately or together, as already
|
||||
explained in :ref:`environments_concretization`. The behavior active
|
||||
@@ -510,9 +503,9 @@ which can currently take either one of the two allowed values ``together`` or ``
|
||||
the environment remains consistent. When instead the specs are concretized
|
||||
separately only the new specs will be re-concretized after any addition.
|
||||
|
||||
"""""""""""""
|
||||
^^^^^^^^^^^^^
|
||||
Spec Matrices
|
||||
"""""""""""""
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
Entries in the ``specs`` list can be individual abstract specs or a
|
||||
spec matrix.
|
||||
@@ -572,9 +565,9 @@ This allows one to create toolchains out of combinations of
|
||||
constraints and apply them somewhat indiscriminately to packages,
|
||||
without regard for the applicability of the constraint.
|
||||
|
||||
""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Spec List References
|
||||
""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The last type of possible entry in the specs list is a reference.
|
||||
|
||||
@@ -674,9 +667,9 @@ The valid variables for a ``when`` clause are:
|
||||
#. ``hostname``. The hostname of the system (if ``hostname`` is an
|
||||
executable in the user's PATH).
|
||||
|
||||
""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
SpecLists as Constraints
|
||||
""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Dependencies and compilers in Spack can be both packages in an
|
||||
environment and constraints on other packages. References to SpecLists
|
||||
@@ -708,41 +701,41 @@ For example, the following environment has three root packages:
|
||||
This allows for a much-needed reduction in redundancy between packages
|
||||
and constraints.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Environment-managed Views
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
----------------
|
||||
Filesystem Views
|
||||
----------------
|
||||
|
||||
Spack Environments can define filesystem views of their software,
|
||||
which are maintained as packages and can be installed and uninstalled from
|
||||
the Environment. Filesystem views provide an access point for packages
|
||||
from the filesystem for users who want to access those packages
|
||||
directly. For more information on filesystem views, see the section
|
||||
:ref:`filesystem-views`.
|
||||
|
||||
Spack Environment managed views are updated every time the environment
|
||||
is written out to the lock file ``spack.lock``, so the concrete
|
||||
environment and the view are always compatible.
|
||||
Spack Environments can define filesystem views, which provide a direct access point
|
||||
for software similar to the directory hierarchy that might exist under ``/usr/local``.
|
||||
Filesystem views are updated every time the environment is written out to the lock
|
||||
file ``spack.lock``, so the concrete environment and the view are always compatible.
|
||||
The files of the view's installed packages are brought into the view by symbolic or
|
||||
hard links, referencing the original Spack installation, or by copy.
|
||||
|
||||
.. _configuring_environment_views:
|
||||
|
||||
"""""""""""""""""""""""""""""
|
||||
Configuring environment views
|
||||
"""""""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuration in ``spack.yaml``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The Spack Environment manifest file has a top-level keyword
|
||||
``view``. Each entry under that heading is a view descriptor, headed
|
||||
by a name. The view descriptor contains the root of the view, and
|
||||
``view``. Each entry under that heading is a **view descriptor**, headed
|
||||
by a name. Any number of views may be defined under the ``view`` heading.
|
||||
The view descriptor contains the root of the view, and
|
||||
optionally the projections for the view, ``select`` and
|
||||
``exclude`` lists for the view and link information via ``link`` and
|
||||
``link_type``. For example, in the following manifest
|
||||
``link_type``.
|
||||
|
||||
For example, in the following manifest
|
||||
file snippet we define a view named ``mpis``, rooted at
|
||||
``/path/to/view`` in which all projections use the package name,
|
||||
version, and compiler name to determine the path for a given
|
||||
package. This view selects all packages that depend on MPI, and
|
||||
excludes those built with the PGI compiler at version 18.5.
|
||||
All the dependencies of each root spec in the environment will be linked
|
||||
in the view due to the command ``link: all`` and the files in the view will
|
||||
be symlinks to the spack install directories.
|
||||
The root specs with their (transitive) link and run type dependencies
|
||||
will be put in the view due to the ``link: all`` option,
|
||||
and the files in the view will be symlinks to the spack install
|
||||
directories.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -758,16 +751,26 @@ be symlinks to the spack install directories.
|
||||
link: all
|
||||
link_type: symlink
|
||||
|
||||
For more information on using view projections, see the section on
|
||||
:ref:`adding_projections_to_views`. The default for the ``select`` and
|
||||
The default for the ``select`` and
|
||||
``exclude`` values is to select everything and exclude nothing. The
|
||||
default projection is the default view projection (``{}``). The ``link``
|
||||
defaults to ``all`` but can also be ``roots`` when only the root specs
|
||||
in the environment are desired in the view. The ``link_type`` defaults
|
||||
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.
|
||||
attribute allows the following values:
|
||||
|
||||
#. ``link: all`` include root specs with their transitive run and link type
|
||||
dependencies (default);
|
||||
#. ``link: run`` include root specs with their transitive run type dependencies;
|
||||
#. ``link: roots`` include root specs without their dependencies.
|
||||
|
||||
The ``link_type`` defaults to ``symlink`` but can also take the value
|
||||
of ``hardlink`` or ``copy``.
|
||||
|
||||
.. tip::
|
||||
|
||||
The option ``link: run`` can be used to create small environment views for
|
||||
Python packages. Python will be able to import packages *inside* of the view even
|
||||
when the environment is not activated, and linked libraries will be located
|
||||
*outside* of the view thanks to rpaths.
|
||||
|
||||
Any number of views may be defined under the ``view`` heading in a
|
||||
Spack Environment.
|
||||
|
||||
There are two shorthands for environments with a single view. If the
|
||||
environment at ``/path/to/env`` has a single view, with a root at
|
||||
@@ -833,9 +836,47 @@ regenerate`` will regenerate the views for the environment. This will
|
||||
apply any updates in the environment configuration that have not yet
|
||||
been applied.
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
.. _view_projections:
|
||||
|
||||
""""""""""""""""
|
||||
View Projections
|
||||
""""""""""""""""
|
||||
The default projection into a view is to link every package into the
|
||||
root of the view. The projections attribute is a mapping of partial specs to
|
||||
spec format strings, defined by the :meth:`~spack.spec.Spec.format`
|
||||
function, as shown in the example below:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
projections:
|
||||
zlib: {name}-{version}
|
||||
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
|
||||
all: {name}-{version}/{compiler.name}-{compiler.version}
|
||||
|
||||
The entries in the projections configuration file must all be either
|
||||
specs or the keyword ``all``. For each spec, the projection used will
|
||||
be the first non-``all`` entry that the spec satisfies, or ``all`` if
|
||||
there is an entry for ``all`` and no other entry is satisfied by the
|
||||
spec. Where the keyword ``all`` appears in the file does not
|
||||
matter.
|
||||
|
||||
Given the example above, the spec ``zlib@1.2.8``
|
||||
will be linked into ``/my/view/zlib-1.2.8/``, the spec
|
||||
``hdf5@1.8.10+mpi %gcc@4.9.3 ^mvapich2@2.2`` will be linked into
|
||||
``/my/view/hdf5-1.8.10/mvapich2-2.2-gcc-4.9.3``, and the spec
|
||||
``hdf5@1.8.10~mpi %gcc@4.9.3`` will be linked into
|
||||
``/my/view/hdf5-1.8.10/gcc-4.9.3``.
|
||||
|
||||
If the keyword ``all`` does not appear in the projections
|
||||
configuration file, any spec that does not satisfy any entry in the
|
||||
file will be linked into the root of the view as in a single-prefix
|
||||
view. Any entries that appear below the keyword ``all`` in the
|
||||
projections configuration file will not be used, as all specs will use
|
||||
the projection under ``all`` before reaching those entries.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Activating environment views
|
||||
""""""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``spack env activate`` command will put the default view for the
|
||||
environment into the user's path, in addition to activating the
|
||||
|
@@ -149,27 +149,28 @@ Spack fall back to bootstrapping from sources:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap untrust github-actions
|
||||
==> "github-actions" is now untrusted and will not be used for bootstrapping
|
||||
$ spack bootstrap untrust github-actions-v0.2
|
||||
==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping
|
||||
|
||||
You can verify that the new settings are effective with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack bootstrap list
|
||||
Name: github-actions UNTRUSTED
|
||||
Name: github-actions-v0.2 UNTRUSTED
|
||||
|
||||
Type: buildcache
|
||||
|
||||
Info:
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
|
||||
homepage: https://github.com/spack/spack-bootstrap-mirrors
|
||||
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
|
||||
|
||||
Description:
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
|
||||
[ ... ]
|
||||
|
||||
Name: spack-install TRUSTED
|
||||
|
||||
@@ -1516,3 +1517,238 @@ To ensure that Spack does not autodetect the Cray programming
|
||||
environment, unset the environment variable ``MODULEPATH``. This
|
||||
will cause Spack to treat a linux container on a Cray system as a base
|
||||
linux distro.
|
||||
|
||||
.. _windows_support:
|
||||
|
||||
----------------
|
||||
Spack On Windows
|
||||
----------------
|
||||
|
||||
Windows support for Spack is currently under development. While this work is still in an early stage,
|
||||
it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
|
||||
you through the steps needed to install Spack and start running it on a fresh Windows machine.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 1: Install prerequisites
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To use Spack on Windows, you will need the following packages:
|
||||
|
||||
Required:
|
||||
* Microsoft Visual Studio
|
||||
* Python
|
||||
* Git
|
||||
|
||||
Optional:
|
||||
* Intel Fortran (needed for some packages)
|
||||
|
||||
.. note::
|
||||
|
||||
Currently MSVC is the only compiler tested for C/C++ projects. Intel OneAPI provides Fortran support.
|
||||
|
||||
"""""""""""""""""""""""
|
||||
Microsoft Visual Studio
|
||||
"""""""""""""""""""""""
|
||||
|
||||
Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
|
||||
|
||||
We require several specific components to be included in the Visual Studio installation.
|
||||
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
|
||||
depending on installation type (Professional, Build Tools, etc.) The other required component is
|
||||
"C++ CMake tools for Windows," which can be selected from among the optional packages.
|
||||
This provides CMake and Ninja for use during Spack configuration.
|
||||
|
||||
If you already have Visual Studio installed, you can make sure these components are installed by
|
||||
rerunning the installer. Next to your installation, select "Modify" and look at the
|
||||
"Installation details" pane on the right.
|
||||
|
||||
"""""""""""""
|
||||
Intel Fortran
|
||||
"""""""""""""
|
||||
|
||||
For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
|
||||
The suite is free to download from Intel's website, located at
|
||||
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
|
||||
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
|
||||
compiler's (ifort's) frontend and runtime libraries by using LLVM.
|
||||
|
||||
""""""
|
||||
Python
|
||||
""""""
|
||||
|
||||
As Spack is a Python-based package, an installation of Python will be needed to run it.
|
||||
Python 3 can be downloaded and installed from the Windows Store, and will be automatically added
|
||||
to your ``PATH`` in this case.
|
||||
|
||||
.. note::
|
||||
Spack currently supports Python versions 3.2 and later.
|
||||
|
||||
"""
|
||||
Git
|
||||
"""
|
||||
|
||||
A bash console and GUI can be downloaded from https://git-scm.com/downloads.
|
||||
If you are unfamiliar with Git, there are a myriad of resources online to help
|
||||
guide you through checking out repositories and switching development branches.
|
||||
|
||||
When given the option of adjusting your ``PATH``, choose the ``Git from the
|
||||
command line and also from 3rd-party software`` option. This will automatically
|
||||
update your ``PATH`` variable to include the ``git`` command.
|
||||
|
||||
Spack support on Windows is currently dependent on installing the Git for Windows project
|
||||
as the project providing Git support on Windows. This is additionally the recommended method
|
||||
for installing Git on Windows, a link to which can be found above. Spack requires the
|
||||
utilities vendored by this project.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 2: Install and setup Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We are now ready to get the Spack environment set up on our machine. We
|
||||
begin by using Git to clone the Spack repo, hosted at https://github.com/spack/spack.git
|
||||
into a desired directory, for our purposes today, called ``spack_install``.
|
||||
|
||||
In order to install Spack with Windows support, run the following one liner
|
||||
in a Windows CMD prompt.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git clone https://github.com/spack/spack.git
|
||||
|
||||
.. note::
|
||||
If you chose to install Spack into a directory on Windows that is set up to require Administrative
|
||||
Privileges, Spack will require elevated privileges to run.
|
||||
Administrative Privileges can be denoted either by default such as
|
||||
``C:\Program Files``, or administrator-applied administrative restrictions
|
||||
on a directory that spack installs files to such as ``C:\Users``
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Step 3: Run and configure Spack
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
|
||||
directory. This will provide a Windows command prompt with an environment properly set up with Spack
|
||||
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
|
||||
(which may happen if you installed Python from the website and not the Windows Store) add the location
|
||||
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
|
||||
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
|
||||
|
||||
.. note::
|
||||
Alternatively, Powershell can be used in place of CMD
|
||||
|
||||
To configure Spack, first run the following command inside the Spack console:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack compiler find
|
||||
|
||||
This creates a ``.staging`` directory in our Spack prefix, along with a ``windows`` subdirectory
|
||||
containing a ``compilers.yaml`` file. On a fresh Windows install with the above packages
|
||||
installed, this command should only detect Microsoft Visual Studio and the Intel Fortran
|
||||
compiler will be integrated within the first version of MSVC present in the ``compilers.yaml``
|
||||
output.
|
||||
|
||||
Spack provides a default ``config.yaml`` file for Windows that it will use unless overridden.
|
||||
This file is located at ``etc\spack\defaults\windows\config.yaml``. You can read more on how to
|
||||
do this and write your own configuration files in the :ref:`Configuration Files<configuration>` section of our
|
||||
documentation. If you do this, pay particular attention to the ``build_stage`` block of the file
|
||||
as this specifies the directory that will temporarily hold the source code for the packages to
|
||||
be installed. This path name must be sufficiently short for compliance with cmd, otherwise you
|
||||
will see build errors during installation (particularly with CMake) tied to long path names.
|
||||
|
||||
To allow Spack use of external tools and dependencies already on your system, the
|
||||
external pieces of software must be described in the ``packages.yaml`` file.
|
||||
There are two methods to populate this file:
|
||||
|
||||
The first and easiest choice is to use Spack to find installations on your system. In
|
||||
the Spack terminal, run the following commands:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack external find cmake
|
||||
spack external find ninja
|
||||
|
||||
The ``spack external find <name>`` will find executables on your system
|
||||
with the same name given. The command will store the items found in
|
||||
``packages.yaml`` in the ``.staging\`` directory.
|
||||
|
||||
Assuming that the command found CMake and Ninja executables in the previous
|
||||
step, continue to Step 4. If no executables were found, we may need to manually direct spack towards the CMake
|
||||
and Ninja installations we set up with Visual Studio. Therefore, your ``packages.yaml`` file will look something
|
||||
like this, with possibly slight variants in the paths to CMake and Ninja:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
cmake:
|
||||
externals:
|
||||
- spec: cmake@3.19
|
||||
prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake'
|
||||
buildable: False
|
||||
ninja:
|
||||
externals:
|
||||
- spec: ninja@1.8.2
|
||||
prefix: 'c:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja'
|
||||
buildable: False
|
||||
|
||||
You can also use a separate installation of CMake if you have one and prefer
|
||||
to use it. If you don't have a path to Ninja analogous to the above, then you can
|
||||
obtain it by running the Visual Studio Installer and following the instructions
|
||||
at the start of this section. Also note that .yaml files use spaces for indentation
|
||||
and not tabs, so ensure that this is the case when editing one directly.
|
||||
|
||||
|
||||
.. note:: Cygwin
|
||||
The use of Cygwin is not officially supported by Spack and is not tested.
|
||||
However, Spack will not throw an error, so if you choose to use Spack
|
||||
with Cygwin, know that no functionality is guaranteed.
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
Step 4: Use Spack
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Once the configuration is complete, it is time to give the installation a test. Install a basic package through the
|
||||
Spack console via:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack install cpuinfo
|
||||
|
||||
If in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
Windows Compatible Packages
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
Many Spack packages are not currently compatible with Windows, due to Unix
|
||||
dependencies or incompatible build tools like autoconf. Here are several
|
||||
packages known to work on Windows:
|
||||
|
||||
* abseil-cpp
|
||||
* clingo
|
||||
* cpuinfo
|
||||
* cmake
|
||||
* glm
|
||||
* nasm
|
||||
* netlib-lapack (requires Intel Fortran)
|
||||
* ninja
|
||||
* openssl
|
||||
* perl
|
||||
* python
|
||||
* ruby
|
||||
* wrf
|
||||
* zlib
|
||||
|
||||
.. note::
|
||||
This is by no means a comprehensive list
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
For developers
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
The intent is to provide a Windows installer that will automatically set up
|
||||
Python, Git, and Spack, instead of requiring the user to do so manually.
|
||||
Instructions for creating the installer are at
|
||||
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
|
||||
|
||||
Alternatively a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI
|
||||
|
@@ -54,9 +54,8 @@ or refer to the full manual below.
|
||||
features
|
||||
getting_started
|
||||
basic_usage
|
||||
workflows
|
||||
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
|
||||
known_issues
|
||||
replace_conda_homebrew
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
@@ -1,77 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
============
|
||||
Known Issues
|
||||
============
|
||||
|
||||
This is a list of known bugs in Spack. It provides ways of getting around these
|
||||
problems if you encounter them.
|
||||
|
||||
---------------------------------------------------
|
||||
Variants are not properly forwarded to dependencies
|
||||
---------------------------------------------------
|
||||
|
||||
**Status:** Expected to be fixed by Spack's new concretizer
|
||||
|
||||
Sometimes, a variant of a package can also affect how its dependencies are
|
||||
built. For example, in order to build MPI support for a package, it may
|
||||
require that its dependencies are also built with MPI support. In the
|
||||
``package.py``, this looks like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('hdf5~mpi', when='~mpi')
|
||||
depends_on('hdf5+mpi', when='+mpi')
|
||||
|
||||
Spack handles this situation properly for *immediate* dependencies, and
|
||||
builds ``hdf5`` with the same variant you used for the package that
|
||||
depends on it. However, for *indirect* dependencies (dependencies of
|
||||
dependencies), Spack does not backtrack up the DAG far enough to handle
|
||||
this. Users commonly run into this situation when trying to build R with
|
||||
X11 support:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X
|
||||
...
|
||||
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
|
||||
Package cairo requires variant ~X, but spec asked for +X
|
||||
|
||||
A workaround is to explicitly activate the variants of dependencies as well:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install r+X ^cairo+X ^pango+X
|
||||
|
||||
See https://github.com/spack/spack/issues/267 and
|
||||
https://github.com/spack/spack/issues/2546 for further details.
|
||||
|
||||
-----------------------------------------------
|
||||
depends_on cannot handle recursive dependencies
|
||||
-----------------------------------------------
|
||||
|
||||
**Status:** Not yet a work in progress
|
||||
|
||||
Although ``depends_on`` can handle any aspect of Spack's spec syntax,
|
||||
it currently cannot handle recursive dependencies. If the ``^`` sigil
|
||||
appears in a ``depends_on`` statement, the concretizer will hang.
|
||||
For example, something like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('mfem+cuda ^hypre+cuda', when='+cuda')
|
||||
|
||||
|
||||
should be rewritten as:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on('mfem+cuda', when='+cuda')
|
||||
depends_on('hypre+cuda', when='+cuda')
|
||||
|
||||
|
||||
See https://github.com/spack/spack/issues/17660 and
|
||||
https://github.com/spack/spack/issues/11160 for more details.
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _mirrors:
|
||||
|
||||
=======
|
||||
Mirrors
|
||||
=======
|
||||
======================
|
||||
Mirrors (mirrors.yaml)
|
||||
======================
|
||||
|
||||
Some sites may not have access to the internet for fetching packages.
|
||||
These sites will need a local repository of tarballs from which they
|
||||
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _modules:
|
||||
|
||||
=======
|
||||
Modules
|
||||
=======
|
||||
======================
|
||||
Modules (modules.yaml)
|
||||
======================
|
||||
|
||||
The use of module systems to manage user environment in a controlled way
|
||||
is a common practice at HPC centers that is often embraced also by
|
||||
@@ -181,10 +181,7 @@ to the environment variables listed below the folder name.
|
||||
Spack modules can be configured for multiple module sets. The default
|
||||
module set is named ``default``. All Spack commands which operate on
|
||||
modules default to apply the ``default`` module set, but can be
|
||||
applied to any module set in the configuration. Settings applied at
|
||||
the root of the configuration (e.g. ``modules:enable`` rather than
|
||||
``modules:default:enable``) are applied to the default module set for
|
||||
backwards compatibility.
|
||||
applied to any module set in the configuration.
|
||||
|
||||
"""""""""""""""""""""""""
|
||||
Changing the modules root
|
||||
@@ -378,7 +375,7 @@ most likely via the ``+blas`` variant specification.
|
||||
|
||||
The most heavyweight solution to module naming is to change the entire
|
||||
naming convention for module files. This uses the projections format
|
||||
covered in :ref:`adding_projections_to_views`.
|
||||
covered in :ref:`view_projections`.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -540,8 +537,7 @@ configuration:
|
||||
|
||||
#. The configuration is for an :ref:`environment <environments>` and
|
||||
will never be applied outside the environment,
|
||||
#. The environment in question is configured to use a :ref:`view
|
||||
<filesystem-views>`,
|
||||
#. The environment in question is configured to use a view,
|
||||
#. The :ref:`environment view is configured
|
||||
<configuring_environment_views>` with a projection that ensures
|
||||
every package is linked to a unique directory,
|
||||
|
@@ -1423,6 +1423,37 @@ other similar operations:
|
||||
).with_default('auto').with_non_feature_values('auto'),
|
||||
)
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
Conditional Possible Values
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
There are cases where a variant may take multiple values, and the list of allowed values
|
||||
expand over time. Think for instance at the C++ standard with which we might compile
|
||||
Boost, which can take one of multiple possible values with the latest standards
|
||||
only available from a certain version on.
|
||||
|
||||
To model a similar situation we can use *conditional possible values* in the variant declaration:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant(
|
||||
'cxxstd', default='98',
|
||||
values=(
|
||||
'98', '11', '14',
|
||||
# C++17 is not supported by Boost < 1.63.0.
|
||||
conditional('17', when='@1.63.0:'),
|
||||
# C++20/2a is not support by Boost < 1.73.0
|
||||
conditional('2a', '2b', when='@1.73.0:')
|
||||
),
|
||||
multi=False,
|
||||
description='Use the specified C++ standard when building.',
|
||||
)
|
||||
|
||||
The snippet above allows ``98``, ``11`` and ``14`` as unconditional possible values for the
|
||||
``cxxstd`` variant, while ``17`` requires a version greater or equal to ``1.63.0``
|
||||
and both ``2a`` and ``2b`` require a version greater or equal to ``1.73.0``.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Conditional Variants
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -2543,7 +2574,7 @@ from being linked in at activation time.
|
||||
Views
|
||||
-----
|
||||
|
||||
As covered in :ref:`filesystem-views`, the ``spack view`` command can be
|
||||
The ``spack view`` command can be
|
||||
used to symlink a number of packages into a merged prefix. The methods of
|
||||
``PackageViewMixin`` can be overridden to customize how packages are added
|
||||
to views. Generally this can be used to create copies of specific files rather
|
||||
|
206
lib/spack/docs/replace_conda_homebrew.rst
Normal file
206
lib/spack/docs/replace_conda_homebrew.rst
Normal file
@@ -0,0 +1,206 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
=====================================
|
||||
Using Spack to Replace Homebrew/Conda
|
||||
=====================================
|
||||
|
||||
Spack is an incredibly powerful package manager, designed for supercomputers
|
||||
where users have diverse installation needs. But Spack can also be used to
|
||||
handle simple single-user installations on your laptop. Most macOS users are
|
||||
already familiar with package managers like Homebrew and Conda, where all
|
||||
installed packages are symlinked to a single central location like ``/usr/local``.
|
||||
In this section, we will show you how to emulate the behavior of Homebrew/Conda
|
||||
using :ref:`environments`!
|
||||
|
||||
-----
|
||||
Setup
|
||||
-----
|
||||
|
||||
First, let's create a new environment. We'll assume that Spack is already set up
|
||||
correctly, and that you've already sourced the setup script for your shell.
|
||||
To create a new environment, simply run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv
|
||||
|
||||
Here, *myenv* can be anything you want to name your environment. Next, we can add
|
||||
a list of packages we would like to install into our environment. Let's say we
|
||||
want a newer version of Bash than the one that comes with macOS, and we want a
|
||||
few Python libraries. We can run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv add bash@5 python py-numpy py-scipy py-matplotlib
|
||||
|
||||
Each package can be listed on a separate line, or combined into a single line like we did above.
|
||||
Notice that we're explicitly asking for Bash 5 here. You can use any spec
|
||||
you would normally use on the command line with other Spack commands.
|
||||
|
||||
Next, we want to manually configure a couple of things:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv config edit
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
# configuration settings.
|
||||
spack:
|
||||
# add package specs to the `specs` list
|
||||
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
|
||||
view: true
|
||||
|
||||
You can see the packages we added earlier in the ``specs:`` section. If you
|
||||
ever want to add more packages, you can either use ``spack add`` or manually
|
||||
edit this file.
|
||||
|
||||
We also need to change the ``concretization:`` option. By default, Spack
|
||||
concretizes each spec *separately*, allowing multiple versions of the same
|
||||
package to coexist. Since we want a single consistent environment, we want to
|
||||
concretize all of the specs *together*.
|
||||
|
||||
Here is what your ``spack.yaml`` looks like with this new setting:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
# configuration settings.
|
||||
spack:
|
||||
# add package specs to the `specs` list
|
||||
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
|
||||
view: true
|
||||
concretization: together
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
Symlink location
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack symlinks all installations to ``/Users/me/spack/var/spack/environments/myenv/.spack-env/view``,
|
||||
which is the default when ``view: true``.
|
||||
You can actually change this to any directory you want. For example, Homebrew
|
||||
uses ``/usr/local``, while Conda uses ``/Users/me/anaconda``. In order to access
|
||||
files in these locations, you need to update ``PATH`` and other environment variables
|
||||
to point to them. Activating the Spack environment does this automatically, but
|
||||
you can also manually set them in your ``.bashrc``.
|
||||
|
||||
.. warning::
|
||||
|
||||
There are several reasons why you shouldn't use ``/usr/local``:
|
||||
|
||||
1. If you are on macOS 10.11+ (El Capitan and newer), Apple makes it hard
|
||||
for you. You may notice permissions issues on ``/usr/local`` due to their
|
||||
`System Integrity Protection <https://support.apple.com/en-us/HT204899>`_.
|
||||
By default, users don't have permissions to install anything in ``/usr/local``,
|
||||
and you can't even change this using ``sudo chown`` or ``sudo chmod``.
|
||||
2. Other package managers like Homebrew will try to install things to the
|
||||
same directory. If you plan on using Homebrew in conjunction with Spack,
|
||||
don't symlink things to ``/usr/local``.
|
||||
3. If you are on a shared workstation, or don't have sudo privileges, you
|
||||
can't do this.
|
||||
|
||||
If you still want to do this anyway, there are several ways around SIP.
|
||||
You could disable SIP by booting into recovery mode and running
|
||||
``csrutil disable``, but this is not recommended, as it can open up your OS
|
||||
to security vulnerabilities. Another technique is to run ``spack concretize``
|
||||
and ``spack install`` using ``sudo``. This is also not recommended.
|
||||
|
||||
The safest way I've found is to create your installation directories using
|
||||
sudo, then change ownership back to the user like so:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
for directory in .spack bin contrib include lib man share
|
||||
do
|
||||
sudo mkdir -p /usr/local/$directory
|
||||
sudo chown $(id -un):$(id -gn) /usr/local/$directory
|
||||
done
|
||||
|
||||
Depending on the packages you install in your environment, the exact list of
|
||||
directories you need to create may vary. You may also find some packages
|
||||
like Java libraries that install a single file to the installation prefix
|
||||
instead of in a subdirectory. In this case, the action is the same, just replace
|
||||
``mkdir -p`` with ``touch`` in the for-loop above.
|
||||
|
||||
But again, it's safer just to use the default symlink location.
|
||||
|
||||
|
||||
------------
|
||||
Installation
|
||||
------------
|
||||
|
||||
To actually concretize the environment, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv concretize
|
||||
|
||||
This will tell you which if any packages are already installed, and alert you
|
||||
to any conflicting specs.
|
||||
|
||||
To actually install these packages and symlink them to your ``view:``
|
||||
directory, simply run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv install
|
||||
$ spack env activate myenv
|
||||
|
||||
Now, when you type ``which python3``, it should find the one you just installed.
|
||||
|
||||
In order to change the default shell to our newer Bash installation, we first
|
||||
need to add it to this list of acceptable shells. Run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ sudo vim /etc/shells
|
||||
|
||||
and add the absolute path to your bash executable. Then run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ chsh -s /path/to/bash
|
||||
|
||||
Now, when you log out and log back in, ``echo $SHELL`` should point to the
|
||||
newer version of Bash.
|
||||
|
||||
---------------------------
|
||||
Updating Installed Packages
|
||||
---------------------------
|
||||
|
||||
Let's say you upgraded to a new version of macOS, or a new version of Python
|
||||
was released, and you want to rebuild your entire software stack. To do this,
|
||||
simply run the following commands:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate myenv
|
||||
$ spack concretize --force
|
||||
$ spack install
|
||||
|
||||
The ``--force`` flag tells Spack to overwrite its previous concretization
|
||||
decisions, allowing you to choose a new version of Python. If any of the new
|
||||
packages like Bash are already installed, ``spack install`` won't re-install
|
||||
them, it will keep the symlinks in place.
|
||||
|
||||
--------------
|
||||
Uninstallation
|
||||
--------------
|
||||
|
||||
If you decide that Spack isn't right for you, uninstallation is simple.
|
||||
Just run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate myenv
|
||||
$ spack uninstall --all
|
||||
|
||||
This will uninstall all packages in your environment and remove the symlinks.
|
@@ -5,9 +5,9 @@
|
||||
|
||||
.. _repositories:
|
||||
|
||||
=============================
|
||||
Package Repositories
|
||||
=============================
|
||||
=================================
|
||||
Package Repositories (repos.yaml)
|
||||
=================================
|
||||
|
||||
Spack comes with thousands of built-in package recipes in
|
||||
``var/spack/repos/builtin/``. This is a **package repository** -- a
|
||||
|
@@ -1,5 +1,5 @@
|
||||
Name, Supported Versions, Notes, Requirement Reason
|
||||
Python, 2.7/3.5-3.9, , Interpreter for Spack
|
||||
Python, 2.7/3.5-3.10, , Interpreter for Spack
|
||||
C/C++ Compilers, , , Building software
|
||||
make, , , Build software
|
||||
patch, , , Build software
|
||||
|
|
File diff suppressed because it is too large
Load Diff
8
lib/spack/env/cc
vendored
8
lib/spack/env/cc
vendored
@@ -241,28 +241,28 @@ case "$command" in
|
||||
mode=cpp
|
||||
debug_flags="-g"
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC)
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
debug_flags="-g"
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
|
||||
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
debug_flags="-g"
|
||||
;;
|
||||
f77|xlf|xlf_r|pgf77)
|
||||
f77|xlf|xlf_r|pgf77|amdflang)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
|
1
lib/spack/env/rocmcc/amdclang
vendored
Symbolic link
1
lib/spack/env/rocmcc/amdclang
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
1
lib/spack/env/rocmcc/amdclang++
vendored
Symbolic link
1
lib/spack/env/rocmcc/amdclang++
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cpp
|
1
lib/spack/env/rocmcc/amdflang
vendored
Symbolic link
1
lib/spack/env/rocmcc/amdflang
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../fc
|
@@ -5,26 +5,36 @@
|
||||
import collections
|
||||
import errno
|
||||
import glob
|
||||
import grp
|
||||
import hashlib
|
||||
import itertools
|
||||
import numbers
|
||||
import os
|
||||
import pwd
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from sys import platform as _platform
|
||||
|
||||
import six
|
||||
|
||||
from llnl.util import tty
|
||||
from llnl.util.compat import Sequence
|
||||
from llnl.util.lang import dedupe, memoized
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.path import path_to_os_path, system_path_filter
|
||||
|
||||
is_windows = _platform == 'win32'
|
||||
|
||||
if not is_windows:
|
||||
import grp
|
||||
import pwd
|
||||
else:
|
||||
import win32security
|
||||
|
||||
|
||||
__all__ = [
|
||||
'FileFilter',
|
||||
@@ -44,6 +54,7 @@
|
||||
'fix_darwin_install_name',
|
||||
'force_remove',
|
||||
'force_symlink',
|
||||
'getuid',
|
||||
'chgrp',
|
||||
'chmod_x',
|
||||
'copy',
|
||||
@@ -60,6 +71,7 @@
|
||||
'remove_directory_contents',
|
||||
'remove_if_dead_link',
|
||||
'remove_linked_tree',
|
||||
'rename',
|
||||
'set_executable',
|
||||
'set_install_permissions',
|
||||
'touch',
|
||||
@@ -71,6 +83,26 @@
|
||||
]
|
||||
|
||||
|
||||
def getuid():
|
||||
if is_windows:
|
||||
import ctypes
|
||||
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
|
||||
return 1
|
||||
return 0
|
||||
else:
|
||||
return os.getuid()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def rename(src, dst):
|
||||
# On Windows, os.rename will fail if the destination file already exists
|
||||
if is_windows:
|
||||
if os.path.exists(dst):
|
||||
os.remove(dst)
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def path_contains_subdirectory(path, root):
|
||||
norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
|
||||
norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
|
||||
@@ -95,6 +127,7 @@ def paths_containing_libs(paths, library_names):
|
||||
required_lib_fnames = possible_library_filenames(library_names)
|
||||
|
||||
rpaths_to_include = []
|
||||
paths = path_to_os_path(*paths)
|
||||
for path in paths:
|
||||
fnames = set(os.listdir(path))
|
||||
if fnames & required_lib_fnames:
|
||||
@@ -103,6 +136,7 @@ def paths_containing_libs(paths, library_names):
|
||||
return rpaths_to_include
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def same_path(path1, path2):
|
||||
norm1 = os.path.abspath(path1).rstrip(os.path.sep)
|
||||
norm2 = os.path.abspath(path2).rstrip(os.path.sep)
|
||||
@@ -153,7 +187,7 @@ def groupid_to_group(x):
|
||||
|
||||
if string:
|
||||
regex = re.escape(regex)
|
||||
|
||||
filenames = path_to_os_path(*filenames)
|
||||
for filename in filenames:
|
||||
|
||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
||||
@@ -263,13 +297,39 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
|
||||
|
||||
repl = r's@\1@\2@g'
|
||||
repl = repl.replace('@', new_delim)
|
||||
|
||||
filenames = path_to_os_path(*filenames)
|
||||
for f in filenames:
|
||||
filter_file(whole_lines, repl, f)
|
||||
filter_file(single_quoted, "'%s'" % repl, f)
|
||||
filter_file(double_quoted, '"%s"' % repl, f)
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def get_owner_uid(path, err_msg=None):
|
||||
if not os.path.exists(path):
|
||||
mkdirp(path, mode=stat.S_IRWXU)
|
||||
|
||||
p_stat = os.stat(path)
|
||||
if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
|
||||
tty.error("Expected {0} to support mode {1}, but it is {2}"
|
||||
.format(path, stat.S_IRWXU, p_stat.st_mode))
|
||||
|
||||
raise OSError(errno.EACCES,
|
||||
err_msg.format(path, path) if err_msg else "")
|
||||
else:
|
||||
p_stat = os.stat(path)
|
||||
|
||||
if _platform != "win32":
|
||||
owner_uid = p_stat.st_uid
|
||||
else:
|
||||
sid = win32security.GetFileSecurity(
|
||||
path, win32security.OWNER_SECURITY_INFORMATION) \
|
||||
.GetSecurityDescriptorOwner()
|
||||
owner_uid = win32security.LookupAccountSid(None, sid)[0]
|
||||
return owner_uid
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def set_install_permissions(path):
|
||||
"""Set appropriate permissions on the installed file."""
|
||||
# If this points to a file maintained in a Spack prefix, it is assumed that
|
||||
@@ -292,14 +352,22 @@ def group_ids(uid=None):
|
||||
Returns:
|
||||
(list of int): gids of groups the user is a member of
|
||||
"""
|
||||
if is_windows:
|
||||
tty.warn("Function is not supported on Windows")
|
||||
return []
|
||||
|
||||
if uid is None:
|
||||
uid = os.getuid()
|
||||
uid = getuid()
|
||||
user = pwd.getpwuid(uid).pw_name
|
||||
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp(path, group):
|
||||
"""Implement the bash chgrp function on a single path"""
|
||||
if is_windows:
|
||||
raise OSError("Function 'chgrp' is not supported on Windows")
|
||||
|
||||
if isinstance(group, six.string_types):
|
||||
gid = grp.getgrnam(group).gr_gid
|
||||
else:
|
||||
@@ -307,6 +375,7 @@ def chgrp(path, group):
|
||||
os.chown(path, -1, gid)
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chmod_x(entry, perms):
|
||||
"""Implements chmod, treating all executable bits as set using the chmod
|
||||
utility's `+X` option.
|
||||
@@ -320,6 +389,7 @@ def chmod_x(entry, perms):
|
||||
os.chmod(entry, perms)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy_mode(src, dest):
|
||||
"""Set the mode of dest to that of src unless it is a link.
|
||||
"""
|
||||
@@ -336,6 +406,7 @@ def copy_mode(src, dest):
|
||||
os.chmod(dest, dest_mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def unset_executable_mode(path):
|
||||
mode = os.stat(path).st_mode
|
||||
mode &= ~stat.S_IXUSR
|
||||
@@ -344,6 +415,7 @@ def unset_executable_mode(path):
|
||||
os.chmod(path, mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy(src, dest, _permissions=False):
|
||||
"""Copy the file(s) *src* to the file or directory *dest*.
|
||||
|
||||
@@ -388,6 +460,7 @@ def copy(src, dest, _permissions=False):
|
||||
copy_mode(src, dst)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def install(src, dest):
|
||||
"""Install the file(s) *src* to the file or directory *dest*.
|
||||
|
||||
@@ -406,6 +479,7 @@ def install(src, dest):
|
||||
copy(src, dest, _permissions=True)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def resolve_link_target_relative_to_the_link(link):
|
||||
"""
|
||||
os.path.isdir uses os.path.exists, which for links will check
|
||||
@@ -420,6 +494,7 @@ def resolve_link_target_relative_to_the_link(link):
|
||||
return os.path.join(link_dir, target)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
"""Recursively copy an entire directory tree rooted at *src*.
|
||||
|
||||
@@ -488,7 +563,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
.format(target, new_target))
|
||||
target = new_target
|
||||
|
||||
os.symlink(target, d)
|
||||
symlink(target, d)
|
||||
elif os.path.isdir(link_target):
|
||||
mkdirp(d)
|
||||
else:
|
||||
@@ -504,6 +579,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
|
||||
copy_mode(s, d)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def install_tree(src, dest, symlinks=True, ignore=None):
|
||||
"""Recursively install an entire directory tree rooted at *src*.
|
||||
|
||||
@@ -523,11 +599,13 @@ def install_tree(src, dest, symlinks=True, ignore=None):
|
||||
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def is_exe(path):
|
||||
"""True if path is an executable file."""
|
||||
return os.path.isfile(path) and os.access(path, os.X_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def get_filetype(path_name):
|
||||
"""
|
||||
Return the output of file path_name as a string to identify file type.
|
||||
@@ -539,6 +617,30 @@ def get_filetype(path_name):
|
||||
return output.strip()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def is_nonsymlink_exe_with_shebang(path):
|
||||
"""
|
||||
Returns whether the path is an executable script with a shebang.
|
||||
Return False when the path is a *symlink* to an executable script.
|
||||
"""
|
||||
try:
|
||||
st = os.lstat(path)
|
||||
# Should not be a symlink
|
||||
if stat.S_ISLNK(st.st_mode):
|
||||
return False
|
||||
|
||||
# Should be executable
|
||||
if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
|
||||
return False
|
||||
|
||||
# Should start with a shebang
|
||||
with open(path, 'rb') as f:
|
||||
return f.read(2) == b'#!'
|
||||
except (IOError, OSError):
|
||||
return False
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp_if_not_world_writable(path, group):
|
||||
"""chgrp path to group if path is not world writable"""
|
||||
mode = os.stat(path).st_mode
|
||||
@@ -568,7 +670,7 @@ def mkdirp(*paths, **kwargs):
|
||||
mode = kwargs.get('mode', None)
|
||||
group = kwargs.get('group', None)
|
||||
default_perms = kwargs.get('default_perms', 'args')
|
||||
|
||||
paths = path_to_os_path(*paths)
|
||||
for path in paths:
|
||||
if not os.path.exists(path):
|
||||
try:
|
||||
@@ -629,6 +731,7 @@ def mkdirp(*paths, **kwargs):
|
||||
raise OSError(errno.EEXIST, "File already exists", path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def force_remove(*paths):
|
||||
"""Remove files without printing errors. Like ``rm -f``, does NOT
|
||||
remove directories."""
|
||||
@@ -640,6 +743,7 @@ def force_remove(*paths):
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def working_dir(dirname, **kwargs):
|
||||
if kwargs.get('create', False):
|
||||
mkdirp(dirname)
|
||||
@@ -659,6 +763,7 @@ def __init__(self, inner_exception, outer_exception):
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
"""Moves a directory to a temporary space. If the operations executed
|
||||
within the context manager don't raise an exception, the directory is
|
||||
@@ -714,6 +819,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def hash_directory(directory, ignore=[]):
|
||||
"""Hashes recursively the content of a directory.
|
||||
|
||||
@@ -742,6 +848,7 @@ def hash_directory(directory, ignore=[]):
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def write_tmp_and_move(filename):
|
||||
"""Write to a temporary file, then move into place."""
|
||||
dirname = os.path.dirname(filename)
|
||||
@@ -753,6 +860,7 @@ def write_tmp_and_move(filename):
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def open_if_filename(str_or_file, mode='r'):
|
||||
"""Takes either a path or a file object, and opens it if it is a path.
|
||||
|
||||
@@ -765,9 +873,13 @@ def open_if_filename(str_or_file, mode='r'):
|
||||
yield str_or_file
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def touch(path):
|
||||
"""Creates an empty file at the specified path."""
|
||||
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
|
||||
if is_windows:
|
||||
perms = (os.O_WRONLY | os.O_CREAT)
|
||||
else:
|
||||
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
|
||||
fd = None
|
||||
try:
|
||||
fd = os.open(path, perms)
|
||||
@@ -777,6 +889,7 @@ def touch(path):
|
||||
os.close(fd)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def touchp(path):
|
||||
"""Like ``touch``, but creates any parent directories needed for the file.
|
||||
"""
|
||||
@@ -784,14 +897,16 @@ def touchp(path):
|
||||
touch(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def force_symlink(src, dest):
|
||||
try:
|
||||
os.symlink(src, dest)
|
||||
symlink(src, dest)
|
||||
except OSError:
|
||||
os.remove(dest)
|
||||
os.symlink(src, dest)
|
||||
symlink(src, dest)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def join_path(prefix, *args):
|
||||
path = str(prefix)
|
||||
for elt in args:
|
||||
@@ -799,6 +914,7 @@ def join_path(prefix, *args):
|
||||
return path
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def ancestor(dir, n=1):
|
||||
"""Get the nth ancestor of a directory."""
|
||||
parent = os.path.abspath(dir)
|
||||
@@ -807,6 +923,7 @@ def ancestor(dir, n=1):
|
||||
return parent
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def get_single_file(directory):
|
||||
fnames = os.listdir(directory)
|
||||
if len(fnames) != 1:
|
||||
@@ -826,6 +943,7 @@ def temp_cwd():
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def temp_rename(orig_path, temp_path):
|
||||
same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
|
||||
if not same_path:
|
||||
@@ -837,11 +955,13 @@ def temp_rename(orig_path, temp_path):
|
||||
shutil.move(temp_path, orig_path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def can_access(file_name):
|
||||
"""True if we have read/write access to the file."""
|
||||
return os.access(file_name, os.R_OK | os.W_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
"""Traverse two filesystem trees simultaneously.
|
||||
|
||||
@@ -924,6 +1044,80 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
yield (source_path, dest_path)
|
||||
|
||||
|
||||
def lexists_islink_isdir(path):
|
||||
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
|
||||
number of stat calls."""
|
||||
# First try to lstat, so we know if it's a link or not.
|
||||
try:
|
||||
lst = os.lstat(path)
|
||||
except (IOError, OSError):
|
||||
return False, False, False
|
||||
|
||||
is_link = stat.S_ISLNK(lst.st_mode)
|
||||
|
||||
# Check whether file is a dir.
|
||||
if not is_link:
|
||||
is_dir = stat.S_ISDIR(lst.st_mode)
|
||||
return True, is_link, is_dir
|
||||
|
||||
# Check whether symlink points to a dir.
|
||||
try:
|
||||
st = os.stat(path)
|
||||
is_dir = stat.S_ISDIR(st.st_mode)
|
||||
except (IOError, OSError):
|
||||
# Dangling symlink (i.e. it lexists but not exists)
|
||||
is_dir = False
|
||||
|
||||
return True, is_link, is_dir
|
||||
|
||||
|
||||
def visit_directory_tree(root, visitor, rel_path='', depth=0):
|
||||
"""
|
||||
Recurses the directory root depth-first through a visitor pattern
|
||||
|
||||
The visitor interface is as follows:
|
||||
- visit_file(root, rel_path, depth)
|
||||
- before_visit_dir(root, rel_path, depth) -> bool
|
||||
if True, descends into this directory
|
||||
- before_visit_symlinked_dir(root, rel_path, depth) -> bool
|
||||
if True, descends into this directory
|
||||
- after_visit_dir(root, rel_path, depth) -> void
|
||||
only called when before_visit_dir returns True
|
||||
- after_visit_symlinked_dir(root, rel_path, depth) -> void
|
||||
only called when before_visit_symlinked_dir returns True
|
||||
"""
|
||||
dir = os.path.join(root, rel_path)
|
||||
|
||||
if sys.version_info >= (3, 5, 0):
|
||||
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) # novermin
|
||||
else:
|
||||
dir_entries = os.listdir(dir)
|
||||
dir_entries.sort()
|
||||
|
||||
for f in dir_entries:
|
||||
if sys.version_info >= (3, 5, 0):
|
||||
rel_child = os.path.join(rel_path, f.name)
|
||||
islink, isdir = f.is_symlink(), f.is_dir()
|
||||
else:
|
||||
rel_child = os.path.join(rel_path, f)
|
||||
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
|
||||
if not lexists:
|
||||
continue
|
||||
|
||||
if not isdir:
|
||||
# Handle files
|
||||
visitor.visit_file(root, rel_child, depth)
|
||||
elif not islink and visitor.before_visit_dir(root, rel_child, depth):
|
||||
# Handle ordinary directories
|
||||
visit_directory_tree(root, visitor, rel_child, depth + 1)
|
||||
visitor.after_visit_dir(root, rel_child, depth)
|
||||
elif islink and visitor.before_visit_symlinked_dir(root, rel_child, depth):
|
||||
# Handle symlinked directories
|
||||
visit_directory_tree(root, visitor, rel_child, depth + 1)
|
||||
visitor.after_visit_symlinked_dir(root, rel_child, depth)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def set_executable(path):
|
||||
mode = os.stat(path).st_mode
|
||||
if mode & stat.S_IRUSR:
|
||||
@@ -935,6 +1129,7 @@ def set_executable(path):
|
||||
os.chmod(path, mode)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def last_modification_time_recursive(path):
|
||||
path = os.path.abspath(path)
|
||||
times = [os.stat(path).st_mtime]
|
||||
@@ -944,6 +1139,7 @@ def last_modification_time_recursive(path):
|
||||
return max(times)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_empty_directories(root):
|
||||
"""Ascend up from the leaves accessible from `root` and remove empty
|
||||
directories.
|
||||
@@ -960,6 +1156,7 @@ def remove_empty_directories(root):
|
||||
pass
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_dead_links(root):
|
||||
"""Recursively removes any dead link that is present in root.
|
||||
|
||||
@@ -972,6 +1169,7 @@ def remove_dead_links(root):
|
||||
remove_if_dead_link(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_if_dead_link(path):
|
||||
"""Removes the argument if it is a dead link.
|
||||
|
||||
@@ -982,6 +1180,7 @@ def remove_if_dead_link(path):
|
||||
os.unlink(path)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_linked_tree(path):
|
||||
"""Removes a directory and its contents.
|
||||
|
||||
@@ -991,15 +1190,31 @@ def remove_linked_tree(path):
|
||||
Parameters:
|
||||
path (str): Directory to be removed
|
||||
"""
|
||||
# On windows, cleaning a Git stage can be an issue
|
||||
# as git leaves readonly files that Python handles
|
||||
# poorly on Windows. Remove readonly status and try again
|
||||
def onerror(func, path, exe_info):
|
||||
os.chmod(path, stat.S_IWUSR)
|
||||
try:
|
||||
func(path)
|
||||
except Exception as e:
|
||||
tty.warn(e)
|
||||
pass
|
||||
|
||||
kwargs = {'ignore_errors': True}
|
||||
if is_windows:
|
||||
kwargs = {'onerror': onerror}
|
||||
|
||||
if os.path.exists(path):
|
||||
if os.path.islink(path):
|
||||
shutil.rmtree(os.path.realpath(path), True)
|
||||
shutil.rmtree(os.path.realpath(path), **kwargs)
|
||||
os.unlink(path)
|
||||
else:
|
||||
shutil.rmtree(path, True)
|
||||
shutil.rmtree(path, **kwargs)
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def safe_remove(*files_or_dirs):
|
||||
"""Context manager to remove the files passed as input, but restore
|
||||
them in case any exception is raised in the context block.
|
||||
@@ -1046,6 +1261,7 @@ def safe_remove(*files_or_dirs):
|
||||
raise
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def fix_darwin_install_name(path):
|
||||
"""Fix install name of dynamic libraries on Darwin to have full path.
|
||||
|
||||
@@ -1132,6 +1348,7 @@ def find(root, files, recursive=True):
|
||||
return _find_non_recursive(root, files)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def _find_recursive(root, search_files):
|
||||
|
||||
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||
@@ -1142,7 +1359,6 @@ def _find_recursive(root, search_files):
|
||||
|
||||
# Make the path absolute to have os.walk also return an absolute path
|
||||
root = os.path.abspath(root)
|
||||
|
||||
for path, _, list_files in os.walk(root):
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(path, search_file))
|
||||
@@ -1156,6 +1372,7 @@ def _find_recursive(root, search_files):
|
||||
return answer
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def _find_non_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict as os.list_dir
|
||||
# can return files in any order (does not preserve stability)
|
||||
@@ -1287,7 +1504,7 @@ def directories(self, value):
|
||||
if isinstance(value, six.string_types):
|
||||
value = [value]
|
||||
|
||||
self._directories = [os.path.normpath(x) for x in value]
|
||||
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
|
||||
|
||||
def _default_directories(self):
|
||||
"""Default computation of directories based on the list of
|
||||
@@ -1445,6 +1662,7 @@ def find_headers(headers, root, recursive=False):
|
||||
return HeaderList(find(root, headers, recursive))
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def find_all_headers(root):
|
||||
"""Convenience function that returns the list of all headers found
|
||||
in the directory passed as argument.
|
||||
@@ -1672,6 +1890,7 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
return LibraryList(found_libs)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_access_dir(path):
|
||||
"""Returns True if the argument is an accessible directory.
|
||||
@@ -1685,6 +1904,7 @@ def can_access_dir(path):
|
||||
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def can_write_to_dir(path):
|
||||
"""Return True if the argument is a directory in which we can write.
|
||||
@@ -1698,6 +1918,7 @@ def can_write_to_dir(path):
|
||||
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@memoized
|
||||
def files_in(*search_paths):
|
||||
"""Returns all the files in paths passed as arguments.
|
||||
@@ -1719,6 +1940,12 @@ def files_in(*search_paths):
|
||||
return files
|
||||
|
||||
|
||||
def is_readable_file(file_path):
|
||||
"""Return True if the path passed as argument is readable"""
|
||||
return os.path.isfile(file_path) and os.access(file_path, os.R_OK)
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def search_paths_for_executables(*path_hints):
|
||||
"""Given a list of path hints returns a list of paths where
|
||||
to search for an executable.
|
||||
@@ -1746,6 +1973,39 @@ def search_paths_for_executables(*path_hints):
|
||||
return executable_paths
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def search_paths_for_libraries(*path_hints):
|
||||
"""Given a list of path hints returns a list of paths where
|
||||
to search for a shared library.
|
||||
|
||||
Args:
|
||||
*path_hints (list of paths): list of paths taken into
|
||||
consideration for a search
|
||||
|
||||
Returns:
|
||||
A list containing the real path of every existing directory
|
||||
in `path_hints` and its `lib` and `lib64` subdirectory if it exists.
|
||||
"""
|
||||
library_paths = []
|
||||
for path in path_hints:
|
||||
if not os.path.isdir(path):
|
||||
continue
|
||||
|
||||
path = os.path.abspath(path)
|
||||
library_paths.append(path)
|
||||
|
||||
lib_dir = os.path.join(path, 'lib')
|
||||
if os.path.isdir(lib_dir):
|
||||
library_paths.append(lib_dir)
|
||||
|
||||
lib64_dir = os.path.join(path, 'lib64')
|
||||
if os.path.isdir(lib64_dir):
|
||||
library_paths.append(lib64_dir)
|
||||
|
||||
return library_paths
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def partition_path(path, entry=None):
|
||||
"""
|
||||
Split the prefixes of the path at the first occurrence of entry and
|
||||
@@ -1762,7 +2022,11 @@ def partition_path(path, entry=None):
|
||||
# Derive the index of entry within paths, which will correspond to
|
||||
# the location of the entry in within the path.
|
||||
try:
|
||||
entries = path.split(os.sep)
|
||||
sep = os.sep
|
||||
entries = path.split(sep)
|
||||
if entries[0].endswith(":"):
|
||||
# Handle drive letters e.g. C:/ on Windows
|
||||
entries[0] = entries[0] + sep
|
||||
i = entries.index(entry)
|
||||
if '' in entries:
|
||||
i -= 1
|
||||
@@ -1773,6 +2037,7 @@ def partition_path(path, entry=None):
|
||||
return paths, '', []
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def prefixes(path):
|
||||
"""
|
||||
Returns a list containing the path and its ancestors, top-to-bottom.
|
||||
@@ -1786,6 +2051,9 @@ def prefixes(path):
|
||||
For example, path ``./hi/jkl/mn`` results in a list with the following
|
||||
paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.
|
||||
|
||||
On Windows, paths will be normalized to use ``/`` and ``/`` will always
|
||||
be used as the separator instead of ``os.sep``.
|
||||
|
||||
Parameters:
|
||||
path (str): the string used to derive ancestor paths
|
||||
|
||||
@@ -1794,14 +2062,17 @@ def prefixes(path):
|
||||
"""
|
||||
if not path:
|
||||
return []
|
||||
|
||||
parts = path.strip(os.sep).split(os.sep)
|
||||
if path.startswith(os.sep):
|
||||
parts.insert(0, os.sep)
|
||||
sep = os.sep
|
||||
parts = path.strip(sep).split(sep)
|
||||
if path.startswith(sep):
|
||||
parts.insert(0, sep)
|
||||
elif parts[0].endswith(":"):
|
||||
# Handle drive letters e.g. C:/ on Windows
|
||||
parts[0] = parts[0] + sep
|
||||
paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]
|
||||
|
||||
try:
|
||||
paths.remove(os.sep)
|
||||
paths.remove(sep)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
@@ -1813,6 +2084,7 @@ def prefixes(path):
|
||||
return paths
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def md5sum(file):
|
||||
"""Compute the MD5 sum of a file.
|
||||
|
||||
@@ -1828,6 +2100,7 @@ def md5sum(file):
|
||||
return md5.digest()
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_directory_contents(dir):
|
||||
"""Remove all contents of a directory."""
|
||||
if os.path.exists(dir):
|
||||
@@ -1839,6 +2112,7 @@ def remove_directory_contents(dir):
|
||||
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def keep_modification_time(*filenames):
|
||||
"""
|
||||
Context manager to keep the modification timestamps of the input files.
|
||||
|
@@ -16,7 +16,7 @@
|
||||
import six
|
||||
from six import string_types
|
||||
|
||||
from llnl.util.compat import MutableMapping, zip_longest
|
||||
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = [r'^\.#', '~$']
|
||||
@@ -589,20 +589,31 @@ def match(string):
|
||||
return match
|
||||
|
||||
|
||||
def dedupe(sequence):
|
||||
"""Yields a stable de-duplication of an hashable sequence
|
||||
def dedupe(sequence, key=None):
|
||||
"""Yields a stable de-duplication of an hashable sequence by key
|
||||
|
||||
Args:
|
||||
sequence: hashable sequence to be de-duplicated
|
||||
key: callable applied on values before uniqueness test; identity
|
||||
by default.
|
||||
|
||||
Returns:
|
||||
stable de-duplication of the sequence
|
||||
|
||||
Examples:
|
||||
|
||||
Dedupe a list of integers:
|
||||
|
||||
[x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]
|
||||
|
||||
[x for x in llnl.util.lang.dedupe([1,-2,1,3,2], key=abs)] == [1, -2, 3]
|
||||
"""
|
||||
seen = set()
|
||||
for x in sequence:
|
||||
if x not in seen:
|
||||
x_key = x if key is None else key(x)
|
||||
if x_key not in seen:
|
||||
yield x
|
||||
seen.add(x)
|
||||
seen.add(x_key)
|
||||
|
||||
|
||||
def pretty_date(time, now=None):
|
||||
@@ -878,11 +889,6 @@ def load_module_from_file(module_name, module_path):
|
||||
except KeyError:
|
||||
pass
|
||||
raise
|
||||
elif sys.version_info[0] == 3 and sys.version_info[1] < 5:
|
||||
import importlib.machinery
|
||||
loader = importlib.machinery.SourceFileLoader( # novm
|
||||
module_name, module_path)
|
||||
module = loader.load_module()
|
||||
elif sys.version_info[0] == 2:
|
||||
import imp
|
||||
module = imp.load_source(module_name, module_path)
|
||||
@@ -956,3 +962,50 @@ def nullcontext(*args, **kwargs):
|
||||
|
||||
class UnhashableArguments(TypeError):
|
||||
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
|
||||
|
||||
|
||||
def enum(**kwargs):
|
||||
"""Return an enum-like class.
|
||||
|
||||
Args:
|
||||
**kwargs: explicit dictionary of enums
|
||||
"""
|
||||
return type('Enum', (object,), kwargs)
|
||||
|
||||
|
||||
class TypedMutableSequence(MutableSequence):
|
||||
"""Base class that behaves like a list, just with a different type.
|
||||
|
||||
Client code can inherit from this base class:
|
||||
|
||||
class Foo(TypedMutableSequence):
|
||||
pass
|
||||
|
||||
and later perform checks based on types:
|
||||
|
||||
if isinstance(l, Foo):
|
||||
# do something
|
||||
"""
|
||||
def __init__(self, iterable):
|
||||
self.data = list(iterable)
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.data[item]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.data[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self.data[key]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.data)
|
||||
|
||||
def insert(self, index, item):
|
||||
self.data.insert(index, item)
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self.data)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.data)
|
||||
|
@@ -10,9 +10,11 @@
|
||||
import filecmp
|
||||
import os
|
||||
import shutil
|
||||
from collections import OrderedDict
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import mkdirp, touch, traverse_tree
|
||||
from llnl.util.symlink import islink, symlink
|
||||
|
||||
__all__ = ['LinkTree']
|
||||
|
||||
@@ -20,7 +22,7 @@
|
||||
|
||||
|
||||
def remove_link(src, dest):
|
||||
if not os.path.islink(dest):
|
||||
if not islink(dest):
|
||||
raise ValueError("%s is not a link tree!" % dest)
|
||||
# remove if dest is a hardlink/symlink to src; this will only
|
||||
# be false if two packages are merged into a prefix and have a
|
||||
@@ -29,6 +31,246 @@ def remove_link(src, dest):
|
||||
os.remove(dest)
|
||||
|
||||
|
||||
class MergeConflict:
|
||||
"""
|
||||
The invariant here is that src_a and src_b are both mapped
|
||||
to dst:
|
||||
|
||||
project(src_a) == project(src_b) == dst
|
||||
"""
|
||||
def __init__(self, dst, src_a=None, src_b=None):
|
||||
self.dst = dst
|
||||
self.src_a = src_a
|
||||
self.src_b = src_b
|
||||
|
||||
|
||||
class SourceMergeVisitor(object):
|
||||
"""
|
||||
Visitor that produces actions:
|
||||
- An ordered list of directories to create in dst
|
||||
- A list of files to link in dst
|
||||
- A list of merge conflicts in dst/
|
||||
"""
|
||||
def __init__(self, ignore=None):
|
||||
self.ignore = ignore if ignore is not None else lambda f: False
|
||||
|
||||
# When mapping <src root> to <dst root>/<projection>, we need
|
||||
# to prepend the <projection> bit to the relative path in the
|
||||
# destination dir.
|
||||
self.projection = ''
|
||||
|
||||
# When a file blocks another file, the conflict can sometimes
|
||||
# be resolved / ignored (e.g. <prefix>/LICENSE or
|
||||
# or <site-packages>/<namespace>/__init__.py conflicts can be
|
||||
# ignored).
|
||||
self.file_conflicts = []
|
||||
|
||||
# When we have to create a dir where a file is, or a file
|
||||
# where a dir is, we have fatal errors, listed here.
|
||||
self.fatal_conflicts = []
|
||||
|
||||
# What directories we have to make; this is an ordered set,
|
||||
# so that we have a fast lookup and can run mkdir in order.
|
||||
self.directories = OrderedDict()
|
||||
|
||||
# Files to link. Maps dst_rel to (src_rel, src_root)
|
||||
self.files = OrderedDict()
|
||||
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
"""
|
||||
Register a directory if dst / rel_path is not blocked by a file or ignored.
|
||||
"""
|
||||
proj_rel_path = os.path.join(self.projection, rel_path)
|
||||
|
||||
if self.ignore(rel_path):
|
||||
# Don't recurse when dir is ignored.
|
||||
return False
|
||||
elif proj_rel_path in self.files:
|
||||
# Can't create a dir where a file is.
|
||||
src_a_root, src_a_relpath = self.files[proj_rel_path]
|
||||
self.fatal_conflicts.append(MergeConflict(
|
||||
dst=proj_rel_path,
|
||||
src_a=os.path.join(src_a_root, src_a_relpath),
|
||||
src_b=os.path.join(root, rel_path)))
|
||||
return False
|
||||
elif proj_rel_path in self.directories:
|
||||
# No new directory, carry on.
|
||||
return True
|
||||
else:
|
||||
# Register new directory.
|
||||
self.directories[proj_rel_path] = (root, rel_path)
|
||||
return True
|
||||
|
||||
def after_visit_dir(self, root, rel_path, depth):
|
||||
pass
|
||||
|
||||
def before_visit_symlinked_dir(self, root, rel_path, depth):
    """
    Replace symlinked dirs with actual directories when possible in low depths,
    otherwise handle it as a file (i.e. we link to the symlink).

    Transforming symlinks into dirs makes it more likely we can merge directories,
    e.g. when <prefix>/lib -> <prefix>/subdir/lib.

    We only do this when the symlink is pointing into a subdirectory from the
    symlink's directory, to avoid potential infinite recursion; and only at a
    constant level of nesting, to avoid potential exponential blowups in file
    duplication.

    Returns True when traversal should descend into the materialized dir.
    """
    if self.ignore(rel_path):
        return False

    # Only follow symlinked dirs in <prefix>/**/**/*
    if depth > 1:
        handle_as_dir = False
    else:
        # Only follow symlinked dirs when pointing deeper. Compare with a
        # trailing separator (or exact equality) so that a sibling like
        # /a/bc is not mistaken for a child of /a/b.
        src = os.path.join(root, rel_path)
        real_parent = os.path.realpath(os.path.dirname(src))
        real_child = os.path.realpath(src)
        handle_as_dir = (
            real_child == real_parent or
            real_child.startswith(real_parent + os.sep))

    if handle_as_dir:
        return self.before_visit_dir(root, rel_path, depth)

    self.visit_file(root, rel_path, depth)
    return False
|
||||
|
||||
def after_visit_symlinked_dir(self, root, rel_path, depth):
    """Post-order hook for symlinked directories; nothing to do."""
    pass
|
||||
|
||||
def visit_file(self, root, rel_path, depth):
    """Register a file to be linked, or record a conflict if the projected
    destination path is already taken by a dir or another file."""
    proj_rel_path = os.path.join(self.projection, rel_path)

    if self.ignore(rel_path):
        return

    if proj_rel_path in self.directories:
        # Can't create a file where a dir is; fatal error
        src_a_root, src_a_relpath = self.directories[proj_rel_path]
        self.fatal_conflicts.append(MergeConflict(
            dst=proj_rel_path,
            src_a=os.path.join(src_a_root, src_a_relpath),
            src_b=os.path.join(root, rel_path)))
    elif proj_rel_path in self.files:
        # In some cases we can resolve file-file conflicts
        src_a_root, src_a_relpath = self.files[proj_rel_path]
        self.file_conflicts.append(MergeConflict(
            dst=proj_rel_path,
            src_a=os.path.join(src_a_root, src_a_relpath),
            src_b=os.path.join(root, rel_path)))
    else:
        # Otherwise register this file to be linked.
        self.files[proj_rel_path] = (root, rel_path)
|
||||
|
||||
def set_projection(self, projection):
    """Set the projection subdirectory prepended to destination paths,
    registering each of its components as a directory to create."""
    self.projection = os.path.normpath(projection)

    # Todo, is this how to check in general for empty projection?
    # normpath collapses a trivial projection to '.'.
    if self.projection == '.':
        self.projection = ''
        return

    # If there is a projection, we'll also create the directories
    # it consists of, and check whether that's causing conflicts.
    path = ''
    for component in self.projection.split(os.sep):
        path = os.path.join(path, component)
        if path in self.files:
            # Can't create a dir where a file is.
            src_a_root, src_a_relpath = self.files[path]
            self.fatal_conflicts.append(MergeConflict(
                dst=path,
                src_a=os.path.join(src_a_root, src_a_relpath),
                src_b=os.path.join('<projection>', path)))
        else:
            self.directories[path] = ('<projection>', path)
|
||||
|
||||
|
||||
class DestinationMergeVisitor(object):
    """DestinationMergeVisitor takes a SourceMergeVisitor
    and:

    a. registers additional conflicts when merging
       to the destination prefix
    b. removes redundant mkdir operations when
       directories already exist in the destination
       prefix.

    This also makes sure that symlinked directories
    in the target prefix will never be merged with
    directories in the sources directories.
    """
    def __init__(self, source_merge_visitor):
        # The SourceMergeVisitor whose planned actions we prune / extend.
        self.src = source_merge_visitor

    def _record_fatal_conflict(self, registry, root, rel_path):
        """If rel_path is registered in *registry* (``self.src.files`` or
        ``self.src.directories``), append a fatal conflict against the
        destination entry root/rel_path and return True; else False."""
        if rel_path not in registry:
            return False
        src_a_root, src_a_relpath = registry[rel_path]
        self.src.fatal_conflicts.append(MergeConflict(
            rel_path,
            os.path.join(src_a_root, src_a_relpath),
            os.path.join(root, rel_path)))
        return True

    def before_visit_dir(self, root, rel_path, depth):
        """Check an existing destination dir against the planned merge.

        Returns True when traversal should descend into this dir.
        """
        # If destination dir is a file in a src dir, add a conflict,
        # and don't traverse deeper.
        if self._record_fatal_conflict(self.src.files, root, rel_path):
            return False

        # If destination dir was also a src dir, remove the mkdir
        # action, and traverse deeper.
        if rel_path in self.src.directories:
            del self.src.directories[rel_path]
            return True

        # If the destination dir does not appear in the src dir,
        # don't descend into it.
        return False

    def after_visit_dir(self, root, rel_path, depth):
        """Post-order hook; nothing to do for destination dirs."""
        pass

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        """
        Symlinked directories in the destination prefix should
        be seen as files; we should not accidentally merge
        source dir with a symlinked dest dir.
        """
        # Always conflict, whether the source planned a dir or a file here.
        self._record_fatal_conflict(self.src.directories, root, rel_path)
        self._record_fatal_conflict(self.src.files, root, rel_path)

        # Never descend into symlinked target dirs.
        return False

    def after_visit_symlinked_dir(self, root, rel_path, depth):
        """Post-order hook; nothing to do."""
        pass

    def visit_file(self, root, rel_path, depth):
        """An existing destination file blocks both a planned dir and a
        planned file link; record a fatal conflict either way."""
        # Can't merge a file if target already exists
        if not self._record_fatal_conflict(self.src.directories, root, rel_path):
            self._record_fatal_conflict(self.src.files, root, rel_path)
|
||||
|
||||
|
||||
class LinkTree(object):
|
||||
"""Class to create trees of symbolic links from a source directory.
|
||||
|
||||
@@ -113,7 +355,7 @@ def unmerge_directories(self, dest_root, ignore):
|
||||
os.remove(marker)
|
||||
|
||||
def merge(self, dest_root, ignore_conflicts=False, ignore=None,
|
||||
link=os.symlink, relative=False):
|
||||
link=symlink, relative=False):
|
||||
"""Link all files in src into dest, creating directories
|
||||
if necessary.
|
||||
|
||||
@@ -125,7 +367,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
|
||||
ignore (callable): callable that returns True if a file is to be
|
||||
ignored in the merge (by default ignore nothing)
|
||||
|
||||
link (callable): function to create links with (defaults to os.symlink)
|
||||
link (callable): function to create links with (defaults to llnl.util.symlink)
|
||||
|
||||
relative (bool): create all symlinks relative to the target
|
||||
(default False)
|
||||
@@ -137,7 +379,7 @@ def merge(self, dest_root, ignore_conflicts=False, ignore=None,
|
||||
conflict = self.find_conflict(
|
||||
dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts)
|
||||
if conflict:
|
||||
raise MergeConflictError(conflict)
|
||||
raise SingleMergeConflictError(conflict)
|
||||
|
||||
self.merge_directories(dest_root, ignore)
|
||||
existing = []
|
||||
@@ -169,7 +411,24 @@ def unmerge(self, dest_root, ignore=None, remove_file=remove_link):
|
||||
|
||||
|
||||
class MergeConflictError(Exception):
    """Base error for conflicts encountered while merging trees."""
    pass
|
||||
|
||||
|
||||
class SingleMergeConflictError(MergeConflictError):
    """Raised when a merge is blocked by a single conflicting file."""
    def __init__(self, path):
        # Name this class (not the base) in super() so the MRO walk starts
        # from the right place for any subclasses.
        super(SingleMergeConflictError, self).__init__(
            "Package merge blocked by file: %s" % path)
|
||||
|
||||
|
||||
class MergeConflictSummary(MergeConflictError):
    """A human-readable summary of file system view merge conflicts
    (showing only the first 3 issues)."""
    def __init__(self, conflicts):
        """Build the summary message from a sequence of MergeConflict
        records (each with src_a, src_b and dst attributes)."""
        msg = "{0} fatal error(s) when merging prefixes:".format(len(conflicts))
        # show the first 3 merge conflicts.
        for conflict in conflicts[:3]:
            msg += "\n `{0}` and `{1}` both project to `{2}`".format(
                conflict.src_a, conflict.src_b, conflict.dst)
        super(MergeConflictSummary, self).__init__(msg)
|
||||
|
@@ -4,9 +4,9 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import errno
|
||||
import fcntl
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime
|
||||
from typing import Dict, Tuple # novm
|
||||
@@ -15,6 +15,10 @@
|
||||
|
||||
import spack.util.string
|
||||
|
||||
if sys.platform != 'win32':
|
||||
import fcntl
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Lock',
|
||||
'LockDowngradeError',
|
||||
@@ -29,8 +33,6 @@
|
||||
'CantCreateLockError'
|
||||
]
|
||||
|
||||
#: Mapping of supported locks to description
|
||||
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
|
||||
|
||||
#: A useful replacement for functions that should return True when not provided
|
||||
#: for example.
|
||||
@@ -166,6 +168,30 @@ def _attempts_str(wait_time, nattempts):
|
||||
return ' after {0:0.2f}s and {1}'.format(wait_time, attempts)
|
||||
|
||||
|
||||
class LockType(object):
    """Platform-independent lock-mode constants with helpers mapping them
    to human-readable names and to ``fcntl`` lock flags."""
    READ = 0
    WRITE = 1

    @staticmethod
    def to_str(tid):
        """Return 'WRITE' for the write lock type, 'READ' otherwise."""
        return "WRITE" if tid == LockType.WRITE else "READ"

    @staticmethod
    def to_module(tid):
        """Translate a LockType constant to the corresponding fcntl flag."""
        return fcntl.LOCK_EX if tid == LockType.WRITE else fcntl.LOCK_SH

    @staticmethod
    def is_valid(op):
        """True when *op* is one of the two recognized lock types."""
        return op in (LockType.READ, LockType.WRITE)
|
||||
|
||||
|
||||
class Lock(object):
|
||||
"""This is an implementation of a filesystem lock using Python's lockf.
|
||||
|
||||
@@ -276,9 +302,10 @@ def _lock(self, op, timeout=None):
|
||||
successfully acquired, the total wait time and the number of attempts
|
||||
is returned.
|
||||
"""
|
||||
assert op in lock_type
|
||||
assert LockType.is_valid(op)
|
||||
op_str = LockType.to_str(op)
|
||||
|
||||
self._log_acquiring('{0} LOCK'.format(lock_type[op].upper()))
|
||||
self._log_acquiring('{0} LOCK'.format(op_str))
|
||||
timeout = timeout or self.default_timeout
|
||||
|
||||
# Create file and parent directories if they don't exist.
|
||||
@@ -286,13 +313,13 @@ def _lock(self, op, timeout=None):
|
||||
self._ensure_parent_directory()
|
||||
self._file = file_tracker.get_fh(self.path)
|
||||
|
||||
if op == fcntl.LOCK_EX and self._file.mode == 'r':
|
||||
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == 'r':
|
||||
# Attempt to upgrade to write lock w/a read-only file.
|
||||
# If the file were writable, we'd have opened it 'r+'
|
||||
raise LockROFileError(self.path)
|
||||
|
||||
self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
|
||||
.format(lock_type[op], self._start, self._length,
|
||||
.format(op_str.lower(), self._start, self._length,
|
||||
timeout))
|
||||
|
||||
poll_intervals = iter(Lock._poll_interval_generator())
|
||||
@@ -313,17 +340,16 @@ def _lock(self, op, timeout=None):
|
||||
return total_wait_time, num_attempts
|
||||
|
||||
raise LockTimeoutError("Timed out waiting for a {0} lock."
|
||||
.format(lock_type[op]))
|
||||
.format(op_str.lower()))
|
||||
|
||||
def _poll_lock(self, op):
|
||||
"""Attempt to acquire the lock in a non-blocking manner. Return whether
|
||||
the locking attempt succeeds
|
||||
"""
|
||||
assert op in lock_type
|
||||
|
||||
module_op = LockType.to_module(op)
|
||||
try:
|
||||
# Try to get the lock (will raise if not available.)
|
||||
fcntl.lockf(self._file, op | fcntl.LOCK_NB,
|
||||
fcntl.lockf(self._file, module_op | fcntl.LOCK_NB,
|
||||
self._length, self._start, os.SEEK_SET)
|
||||
|
||||
# help for debugging distributed locking
|
||||
@@ -331,11 +357,11 @@ def _poll_lock(self, op):
|
||||
# All locks read the owner PID and host
|
||||
self._read_log_debug_data()
|
||||
self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
|
||||
.format(lock_type[op], self.path,
|
||||
.format(LockType.to_str(op), self.path,
|
||||
self._start, self._length, self.pid))
|
||||
|
||||
# Exclusive locks write their PID/host
|
||||
if op == fcntl.LOCK_EX:
|
||||
if module_op == fcntl.LOCK_EX:
|
||||
self._write_log_debug_data()
|
||||
|
||||
return True
|
||||
@@ -420,7 +446,7 @@ def acquire_read(self, timeout=None):
|
||||
|
||||
if self._reads == 0 and self._writes == 0:
|
||||
# can raise LockError.
|
||||
wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
|
||||
wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
|
||||
self._reads += 1
|
||||
# Log if acquired, which includes counts when verbose
|
||||
self._log_acquired('READ LOCK', wait_time, nattempts)
|
||||
@@ -445,7 +471,7 @@ def acquire_write(self, timeout=None):
|
||||
|
||||
if self._writes == 0:
|
||||
# can raise LockError.
|
||||
wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
|
||||
wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
|
||||
self._writes += 1
|
||||
# Log if acquired, which includes counts when verbose
|
||||
self._log_acquired('WRITE LOCK', wait_time, nattempts)
|
||||
@@ -489,7 +515,7 @@ def downgrade_write_to_read(self, timeout=None):
|
||||
if self._writes == 1 and self._reads == 0:
|
||||
self._log_downgrading()
|
||||
# can raise LockError.
|
||||
wait_time, nattempts = self._lock(fcntl.LOCK_SH, timeout=timeout)
|
||||
wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
|
||||
self._reads = 1
|
||||
self._writes = 0
|
||||
self._log_downgraded(wait_time, nattempts)
|
||||
@@ -508,7 +534,7 @@ def upgrade_read_to_write(self, timeout=None):
|
||||
if self._reads == 1 and self._writes == 0:
|
||||
self._log_upgrading()
|
||||
# can raise LockError.
|
||||
wait_time, nattempts = self._lock(fcntl.LOCK_EX, timeout=timeout)
|
||||
wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
|
||||
self._reads = 0
|
||||
self._writes = 1
|
||||
self._log_upgraded(wait_time, nattempts)
|
||||
@@ -592,6 +618,12 @@ def release_write(self, release_fn=None):
|
||||
else:
|
||||
return False
|
||||
|
||||
def cleanup(self):
|
||||
if self._reads == 0 and self._writes == 0:
|
||||
os.unlink(self.path)
|
||||
else:
|
||||
raise LockError("Attempting to cleanup active lock.")
|
||||
|
||||
def _get_counts_desc(self):
|
||||
return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
|
||||
if tty.is_verbose() else ''
|
||||
|
112
lib/spack/llnl/util/symlink.py
Normal file
112
lib/spack/llnl/util/symlink.py
Normal file
@@ -0,0 +1,112 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from os.path import exists, join
|
||||
from sys import platform as _platform
|
||||
|
||||
from llnl.util import lang
|
||||
|
||||
is_windows = _platform == 'win32'
|
||||
|
||||
if is_windows:
|
||||
from win32file import CreateHardLink
|
||||
|
||||
|
||||
def symlink(real_path, link_path):
    """
    Create a symbolic link.

    On Windows, use junctions if os.symlink fails.
    """
    # On POSIX, or on a Windows host that supports real symlinks,
    # defer to the OS directly.
    if not is_windows or _win32_can_symlink():
        os.symlink(real_path, link_path)
        return

    try:
        # Try to use junctions
        _win32_junction(real_path, link_path)
    except OSError:
        # If all else fails, fall back to copying files
        shutil.copyfile(real_path, link_path)
|
||||
|
||||
|
||||
def islink(path):
    """Return True if *path* is a symlink or (on Windows) a junction."""
    return os.path.islink(path) or _win32_is_junction(path)
|
||||
|
||||
|
||||
# '_win32' functions based on
|
||||
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
|
||||
def _win32_junction(path, link):
    """Create a Windows link at *link* pointing to *path*.

    NOTE(review): despite the name, this calls ``CreateHardLink`` rather
    than creating a directory junction -- confirm this is intended.
    Raises OSError(EEXIST) if *link* already exists, mirroring os.symlink.
    """
    # junctions require absolute paths
    if not os.path.isabs(link):
        link = os.path.abspath(link)

    # os.symlink will fail if link exists, emulate the behavior here
    if exists(link):
        raise OSError(errno.EEXIST, 'File exists: %s -> %s' % (link, path))

    # Resolve a relative target against the link's parent directory
    # (link/.. is the directory containing the link).
    if not os.path.isabs(path):
        parent = os.path.join(link, os.pardir)
        path = os.path.join(parent, path)
        path = os.path.abspath(path)

    CreateHardLink(link, path)
|
||||
|
||||
|
||||
@lang.memoized
def _win32_can_symlink():
    """Return True if this host can create both directory and file symlinks.

    Probes by creating throwaway symlinks in a temp directory; memoized so
    the filesystem probe runs at most once per process.
    """
    tempdir = tempfile.mkdtemp()

    # Link targets (the directory target need not exist for the probe).
    dpath = join(tempdir, 'dpath')
    fpath = join(tempdir, 'fpath.txt')

    dlink = join(tempdir, 'dlink')
    flink = join(tempdir, 'flink.txt')

    # Local import -- presumably to avoid a circular import at module
    # load time; confirm before moving to the top of the file.
    import llnl.util.filesystem as fs
    fs.touchp(fpath)

    try:
        os.symlink(dpath, dlink)
        can_symlink_directories = os.path.islink(dlink)
    except OSError:
        can_symlink_directories = False

    try:
        os.symlink(fpath, flink)
        can_symlink_files = os.path.islink(flink)
    except OSError:
        can_symlink_files = False

    # Cleanup the test directory
    shutil.rmtree(tempdir)

    return can_symlink_directories and can_symlink_files
|
||||
|
||||
|
||||
def _win32_is_junction(path):
    """
    Determines if a path is a win32 junction
    """
    # A true symlink is not a junction.
    if os.path.islink(path):
        return False

    if is_windows:
        import ctypes.wintypes

        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
        GetFileAttributes.restype = ctypes.wintypes.DWORD

        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400

        # Junctions (like all reparse points) carry the reparse-point
        # attribute flag.
        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and \
            bool(res & FILE_ATTRIBUTE_REPARSE_POINT)

    # Junctions do not exist off Windows.
    return False
|
@@ -6,19 +6,22 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import contextlib
|
||||
import fcntl
|
||||
import os
|
||||
import struct
|
||||
import sys
|
||||
import termios
|
||||
import textwrap
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from sys import platform as _platform
|
||||
|
||||
import six
|
||||
from six import StringIO
|
||||
from six.moves import input
|
||||
|
||||
if _platform != "win32":
|
||||
import fcntl
|
||||
import termios
|
||||
|
||||
from llnl.util.tty.color import cescape, clen, cprint, cwrite
|
||||
|
||||
# Globals
|
||||
@@ -143,7 +146,7 @@ def process_stacktrace(countback):
|
||||
file_list = []
|
||||
for frame in st:
|
||||
# Check that the file is a spack file
|
||||
if frame[0].find("/spack") >= 0:
|
||||
if frame[0].find(os.path.sep + "spack") >= 0:
|
||||
file_list.append(frame[0])
|
||||
# We use commonprefix to find what the spack 'root' directory is.
|
||||
root_dir = os.path.commonprefix(file_list)
|
||||
@@ -370,22 +373,29 @@ def hline(label=None, **kwargs):
|
||||
|
||||
def terminal_size():
|
||||
"""Gets the dimensions of the console: (rows, cols)."""
|
||||
def ioctl_gwinsz(fd):
|
||||
try:
|
||||
rc = struct.unpack('hh', fcntl.ioctl(
|
||||
fd, termios.TIOCGWINSZ, '1234'))
|
||||
except BaseException:
|
||||
return
|
||||
return rc
|
||||
rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
|
||||
if not rc:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
rc = ioctl_gwinsz(fd)
|
||||
os.close(fd)
|
||||
except BaseException:
|
||||
pass
|
||||
if not rc:
|
||||
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
|
||||
if _platform != "win32":
|
||||
def ioctl_gwinsz(fd):
|
||||
try:
|
||||
rc = struct.unpack('hh', fcntl.ioctl(
|
||||
fd, termios.TIOCGWINSZ, '1234'))
|
||||
except BaseException:
|
||||
return
|
||||
return rc
|
||||
rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
|
||||
if not rc:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
rc = ioctl_gwinsz(fd)
|
||||
os.close(fd)
|
||||
except BaseException:
|
||||
pass
|
||||
if not rc:
|
||||
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
|
||||
|
||||
return int(rc[0]), int(rc[1])
|
||||
return int(rc[0]), int(rc[1])
|
||||
else:
|
||||
if sys.version_info[0] < 3:
|
||||
raise RuntimeError("Terminal size not obtainable on Windows with a\
|
||||
Python version older than 3")
|
||||
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
|
||||
return int(rc[0]), int(rc[1])
|
||||
|
@@ -8,15 +8,19 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import atexit
|
||||
import ctypes
|
||||
import errno
|
||||
import io
|
||||
import multiprocessing
|
||||
import os
|
||||
import re
|
||||
import select
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import traceback
|
||||
from contextlib import contextmanager
|
||||
from threading import Thread
|
||||
from types import ModuleType # novm
|
||||
from typing import Optional # novm
|
||||
|
||||
@@ -399,7 +403,7 @@ def replace_environment(env):
|
||||
os.environ[name] = val
|
||||
|
||||
|
||||
class log_output(object):
|
||||
def log_output(*args, **kwargs):
|
||||
"""Context manager that logs its output to a file.
|
||||
|
||||
In the simplest case, the usage looks like this::
|
||||
@@ -414,6 +418,7 @@ class log_output(object):
|
||||
with log_output('logfile.txt', echo=True):
|
||||
# do things ... output will be logged and printed out
|
||||
|
||||
The following is available on Unix only. No-op on Windows.
|
||||
And, if you just want to echo *some* stuff from the parent, use
|
||||
``force_echo``::
|
||||
|
||||
@@ -423,6 +428,20 @@ class log_output(object):
|
||||
with logger.force_echo():
|
||||
# things here will be echoed *and* logged
|
||||
|
||||
See individual log classes for more information.
|
||||
|
||||
|
||||
This method is actually a factory serving a per platform
|
||||
(unix vs windows) log_output class
|
||||
"""
|
||||
if sys.platform == 'win32':
|
||||
return winlog(*args, **kwargs)
|
||||
else:
|
||||
return nixlog(*args, **kwargs)
|
||||
|
||||
|
||||
class nixlog(object):
|
||||
"""
|
||||
Under the hood, we spawn a daemon and set up a pipe between this
|
||||
process and the daemon. The daemon writes our output to both the
|
||||
file and to stdout (if echoing). The parent process can communicate
|
||||
@@ -564,7 +583,7 @@ def __enter__(self):
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
# Now do the actual output rediction.
|
||||
# Now do the actual output redirection.
|
||||
self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
|
||||
if self.use_fds:
|
||||
# We try first to use OS-level file descriptors, as this
|
||||
@@ -671,6 +690,175 @@ def force_echo(self):
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
class StreamWrapper:
    """ Wrapper class to handle redirection of io streams """
    def __init__(self, sys_attr):
        # Which sys stream this wraps: 'stdout' or 'stderr'.
        self.sys_attr = sys_attr
        # Dup'd fd of the original stream; used by close() to restore it.
        self.saved_stream = None
        if sys.platform.startswith('win32'):
            # Locate the C runtime so C-level stdio buffers can be flushed.
            if sys.version_info < (3, 5):
                libc = ctypes.CDLL(ctypes.util.find_library('c'))
            else:
                if hasattr(sys, 'gettotalrefcount'):  # debug build
                    libc = ctypes.CDLL('ucrtbased')
                else:
                    libc = ctypes.CDLL('api-ms-win-crt-stdio-l1-1-0')

            kernel32 = ctypes.WinDLL('kernel32')

            # https://docs.microsoft.com/en-us/windows/console/getstdhandle
            if self.sys_attr == 'stdout':
                STD_HANDLE = -11
            elif self.sys_attr == 'stderr':
                STD_HANDLE = -12
            else:
                raise KeyError(self.sys_attr)

            c_stdout = kernel32.GetStdHandle(STD_HANDLE)
            self.libc = libc
            self.c_stream = c_stdout
        else:
            # On POSIX the C stream symbol shares the name of sys_attr.
            self.libc = ctypes.CDLL(None)
            self.c_stream = ctypes.c_void_p.in_dll(self.libc, self.sys_attr)
        self.sys_stream = getattr(sys, self.sys_attr)
        self.orig_stream_fd = self.sys_stream.fileno()
        # Save a copy of the original stdout fd in saved_stream
        self.saved_stream = os.dup(self.orig_stream_fd)

    def redirect_stream(self, to_fd):
        """Redirect stdout to the given file descriptor."""
        # Flush the C-level buffer stream
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
        # Flush and close sys_stream - also closes the file descriptor (fd)
        sys_stream = getattr(sys, self.sys_attr)
        sys_stream.flush()
        sys_stream.close()
        # Make orig_stream_fd point to the same file as to_fd
        os.dup2(to_fd, self.orig_stream_fd)
        # Set sys_stream to a new stream that points to the redirected fd
        new_buffer = open(self.orig_stream_fd, 'wb')
        new_stream = io.TextIOWrapper(new_buffer)
        setattr(sys, self.sys_attr, new_stream)
        self.sys_stream = getattr(sys, self.sys_attr)

    def flush(self):
        """Flush both the C-level stdio buffer and the Python stream."""
        if sys.platform.startswith('win32'):
            self.libc.fflush(None)
        else:
            self.libc.fflush(self.c_stream)
        self.sys_stream.flush()

    def close(self):
        """Redirect back to the original system stream, and close stream"""
        try:
            if self.saved_stream is not None:
                self.redirect_stream(self.saved_stream)
        finally:
            # Always release the dup'd fd, even if restoration failed.
            if self.saved_stream is not None:
                os.close(self.saved_stream)
|
||||
|
||||
|
||||
class winlog(object):
    """
    Similar to nixlog, with underlying
    functionality ported to support Windows.

    Does not support the use of 'v' toggling as nixlog does.
    """
    def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
                 env=None, filter_fn=None):
        # Environment to install around the background reader thread.
        self.env = env
        self.debug = debug
        # Whether logged output is also echoed to the user's terminal.
        self.echo = echo
        # Path or StringIO that receives the log.
        self.logfile = file_like
        self.stdout = StreamWrapper('stdout')
        self.stderr = StreamWrapper('stderr')
        self._active = False
        # True when logging into a StringIO rather than a real file.
        self._ioflag = False
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

    def __enter__(self):
        if self._active:
            raise RuntimeError("Can't re-enter the same log_output!")

        if self.logfile is None:
            raise RuntimeError(
                "file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if type(self.logfile) == StringIO:
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
            sys.stderr = self.logfile
        else:
            self.writer = open(self.logfile, mode='wb+')
            self.reader = open(self.logfile, mode='rb+')

            # Dup stdout so we can still write to it after redirection
            self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
            # Redirect stdout and stderr to write to logfile
            self.stderr.redirect_stream(self.writer.fileno())
            self.stdout.redirect_stream(self.writer.fileno())
            self._kill = threading.Event()

            def background_reader(reader, echo_writer, _kill):
                # for each line printed to logfile, read it
                # if echo: write line to user
                while True:
                    # Poll every 0.1s; also serves as the shutdown check.
                    is_killed = _kill.wait(.1)
                    self.stderr.flush()
                    self.stdout.flush()
                    line = reader.readline()
                    while line:
                        if self.echo:
                            self.echo_writer.write('{0}'.format(line.decode()))
                            self.echo_writer.flush()
                        line = reader.readline()

                    # Drain fully before exiting so no output is lost.
                    if is_killed:
                        break

            self._active = True
            with replace_environment(self.env):
                self._thread = Thread(target=background_reader,
                                      args=(self.reader, self.echo_writer,
                                            self._kill))
                self._thread.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._ioflag:
            # StringIO mode: just restore the saved Python-level streams.
            sys.stdout = self.old_stdout
            sys.stderr = self.old_stderr
            self._ioflag = False
        else:
            self.writer.close()
            self.reader.close()
            self.echo_writer.flush()
            self.stdout.flush()
            self.stderr.flush()
            # Signal the background reader, then wait for it to drain.
            self._kill.set()
            self._thread.join()
            self.stdout.close()
            self.stderr.close()

        self._active = False

    @contextmanager
    def force_echo(self):
        """Context manager to force local echo, even if echo is off."""
        if not self._active:
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")
        try:
            yield self
        finally:
            pass
|
||||
|
||||
|
||||
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
log_file_wrapper, control_pipe, filter_fn):
|
||||
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
|
||||
|
@@ -11,6 +11,7 @@
|
||||
things like timeouts in ``ProcessController.wait()``, which are set to
|
||||
get tests done quickly, not to avoid high CPU usage.
|
||||
|
||||
Note: The functionality in this module is unsupported on Windows
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
@@ -19,7 +20,6 @@
|
||||
import re
|
||||
import signal
|
||||
import sys
|
||||
import termios
|
||||
import time
|
||||
import traceback
|
||||
|
||||
@@ -27,6 +27,13 @@
|
||||
|
||||
from spack.util.executable import which
|
||||
|
||||
termios = None
|
||||
try:
|
||||
import termios as term_mod
|
||||
termios = term_mod
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class ProcessController(object):
|
||||
"""Wrapper around some fundamental process control operations.
|
||||
|
@@ -3,10 +3,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
#: major, minor, patch version for Spack, in a tuple
|
||||
spack_version_info = (0, 17, 1)
|
||||
#: (major, minor, micro, dev release) tuple
|
||||
spack_version_info = (0, 18, 0, 'dev0')
|
||||
|
||||
#: String containing Spack version joined with .'s
|
||||
spack_version = '.'.join(str(v) for v in spack_version_info)
|
||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||
spack_version = '.'.join(str(s) for s in spack_version_info)
|
||||
|
||||
__all__ = ['spack_version_info', 'spack_version']
|
||||
__version__ = spack_version
|
||||
|
@@ -41,11 +41,14 @@ def _search_duplicate_compilers(error_cls):
|
||||
|
||||
from six.moves.urllib.request import urlopen
|
||||
|
||||
try:
|
||||
from collections.abc import Sequence # novm
|
||||
except ImportError:
|
||||
from collections import Sequence
|
||||
import llnl.util.lang
|
||||
from llnl.util.compat import Sequence
|
||||
|
||||
import spack.config
|
||||
import spack.patch
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.variant
|
||||
|
||||
#: Map an audit tag to a list of callables implementing checks
|
||||
CALLBACKS = {}
|
||||
@@ -180,7 +183,6 @@ def run_check(tag, **kwargs):
|
||||
@config_compiler
|
||||
def _search_duplicate_compilers(error_cls):
|
||||
"""Report compilers with the same spec and two different definitions"""
|
||||
import spack.config
|
||||
errors = []
|
||||
|
||||
compilers = list(sorted(
|
||||
@@ -217,8 +219,6 @@ def _search_duplicate_compilers(error_cls):
|
||||
@config_packages
|
||||
def _search_duplicate_specs_in_externals(error_cls):
|
||||
"""Search for duplicate specs declared as externals"""
|
||||
import spack.config
|
||||
|
||||
errors, externals = [], collections.defaultdict(list)
|
||||
packages_yaml = spack.config.get('packages')
|
||||
|
||||
@@ -265,6 +265,7 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
kwargs=('pkgs',)
|
||||
)
|
||||
|
||||
|
||||
#: Sanity checks on linting
|
||||
# This can take some time, so it's run separately from packages
|
||||
package_https_directives = AuditClass(
|
||||
@@ -275,15 +276,40 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
)
|
||||
|
||||
|
||||
@package_directives
|
||||
def _check_patch_urls(pkgs, error_cls):
|
||||
"""Ensure that patches fetched from GitHub have stable sha256 hashes."""
|
||||
github_patch_url_re = (
|
||||
r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
|
||||
)
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
for condition, patches in pkg.patches.items():
|
||||
for patch in patches:
|
||||
if not isinstance(patch, spack.patch.UrlPatch):
|
||||
continue
|
||||
|
||||
if not re.match(github_patch_url_re, patch.url):
|
||||
continue
|
||||
|
||||
full_index_arg = "?full_index=1"
|
||||
if not patch.url.endswith(full_index_arg):
|
||||
errors.append(error_cls(
|
||||
"patch URL in package {0} must end with {1}".format(
|
||||
pkg.name, full_index_arg,
|
||||
),
|
||||
[patch.url],
|
||||
))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links
|
||||
"""
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
@@ -308,11 +334,6 @@ def _linting_package_file(pkgs, error_cls):
|
||||
@package_directives
|
||||
def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
"""Report unknown or wrong variants in directives for this package"""
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
@@ -367,9 +388,6 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
@package_directives
|
||||
def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
"""Report unknown dependencies and wrong variants for dependencies"""
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
@@ -417,8 +435,6 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
@package_directives
|
||||
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
|
||||
"""Report if version constraints used in directives are not satisfiable"""
|
||||
import spack.repo
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
@@ -455,7 +471,6 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
|
||||
|
||||
|
||||
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
|
||||
import spack.variant
|
||||
variant_exceptions = (
|
||||
spack.variant.InconsistentValidationError,
|
||||
spack.variant.MultipleValuesInExclusiveVariantError,
|
||||
|
@@ -2065,14 +2065,13 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
|
||||
|
||||
|
||||
def download_single_spec(
|
||||
concrete_spec, destination, require_cdashid=False, mirror_url=None
|
||||
concrete_spec, destination, mirror_url=None
|
||||
):
|
||||
"""Download the buildcache files for a single concrete spec.
|
||||
|
||||
Args:
|
||||
concrete_spec: concrete spec to be downloaded
|
||||
destination (str): path where to put the downloaded buildcache
|
||||
require_cdashid (bool): if False the `.cdashid` file is optional
|
||||
mirror_url (str): url of the mirror from which to download
|
||||
"""
|
||||
tarfile_name = tarball_name(concrete_spec, '.spack')
|
||||
@@ -2090,10 +2089,6 @@ def download_single_spec(
|
||||
tarball_name(concrete_spec, '.spec.yaml')],
|
||||
'path': destination,
|
||||
'required': True,
|
||||
}, {
|
||||
'url': [tarball_name(concrete_spec, '.cdashid')],
|
||||
'path': destination,
|
||||
'required': require_cdashid,
|
||||
},
|
||||
]
|
||||
|
||||
|
@@ -727,9 +727,11 @@ def _root_spec(spec_str):
|
||||
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
|
||||
"""
|
||||
# Add a proper compiler hint to the root spec. We use GCC for
|
||||
# everything but MacOS.
|
||||
# everything but MacOS and Windows.
|
||||
if str(spack.platforms.host()) == 'darwin':
|
||||
spec_str += ' %apple-clang'
|
||||
elif str(spack.platforms.host()) == 'windows':
|
||||
spec_str += ' %msvc'
|
||||
else:
|
||||
spec_str += ' %gcc'
|
||||
|
||||
|
@@ -46,6 +46,7 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import install, install_tree, mkdirp
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
from llnl.util.tty.log import MultiProcessFd
|
||||
|
||||
@@ -373,7 +374,8 @@ def set_wrapper_variables(pkg, env):
|
||||
# directory. Add that to the path too.
|
||||
env_paths = []
|
||||
compiler_specific = os.path.join(
|
||||
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths['cc']))
|
||||
spack.paths.build_env_path,
|
||||
os.path.dirname(pkg.compiler.link_paths['cc']))
|
||||
for item in [spack.paths.build_env_path, compiler_specific]:
|
||||
env_paths.append(item)
|
||||
ci = os.path.join(item, 'case-insensitive')
|
||||
@@ -526,7 +528,9 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.cmake = Executable('cmake')
|
||||
m.ctest = MakeExecutable('ctest', jobs)
|
||||
|
||||
# Standard build system arguments
|
||||
if sys.platform == 'win32':
|
||||
m.nmake = Executable('nmake')
|
||||
# Standard CMake arguments
|
||||
m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg)
|
||||
m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg)
|
||||
m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg)
|
||||
@@ -545,7 +549,7 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.makedirs = os.makedirs
|
||||
m.remove = os.remove
|
||||
m.removedirs = os.removedirs
|
||||
m.symlink = os.symlink
|
||||
m.symlink = symlink
|
||||
|
||||
m.mkdirp = mkdirp
|
||||
m.install = install
|
||||
@@ -668,11 +672,11 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
|
||||
shared_lib_link = os.path.basename(shared_lib)
|
||||
|
||||
if version or compat_version:
|
||||
os.symlink(shared_lib_link, shared_lib_base)
|
||||
symlink(shared_lib_link, shared_lib_base)
|
||||
|
||||
if compat_version and compat_version != version:
|
||||
os.symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
|
||||
compat_version))
|
||||
symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
|
||||
compat_version))
|
||||
|
||||
return compiler(*compiler_args, output=compiler_output)
|
||||
|
||||
@@ -821,12 +825,13 @@ def setup_package(pkg, dirty, context='build'):
|
||||
for mod in pkg.compiler.modules:
|
||||
load_module(mod)
|
||||
|
||||
# kludge to handle cray libsci being automatically loaded by PrgEnv
|
||||
# modules on cray platform. Module unload does no damage when
|
||||
# kludge to handle cray mpich and libsci being automatically loaded by
|
||||
# PrgEnv modules on cray platform. Module unload does no damage when
|
||||
# unnecessary
|
||||
on_cray, _ = _on_cray()
|
||||
if on_cray:
|
||||
module('unload', 'cray-libsci')
|
||||
for mod in ['cray-mpich', 'cray-libsci']:
|
||||
module('unload', mod)
|
||||
|
||||
if target.module_name:
|
||||
load_module(target.module_name)
|
||||
@@ -1135,7 +1140,8 @@ def child_fun():
|
||||
|
||||
try:
|
||||
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
||||
if sys.stdin.isatty() and hasattr(sys.stdin, 'fileno'):
|
||||
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin,
|
||||
'fileno'):
|
||||
input_fd = os.dup(sys.stdin.fileno())
|
||||
input_multiprocess_fd = MultiProcessFd(input_fd)
|
||||
|
||||
@@ -1143,6 +1149,7 @@ def child_fun():
|
||||
target=_setup_pkg_and_run,
|
||||
args=(serialized_pkg, function, kwargs, child_pipe,
|
||||
input_multiprocess_fd))
|
||||
|
||||
p.start()
|
||||
|
||||
except InstallError as e:
|
||||
|
@@ -14,7 +14,7 @@
|
||||
from llnl.util.filesystem import force_remove, working_dir
|
||||
|
||||
from spack.build_environment import InstallError
|
||||
from spack.directives import depends_on
|
||||
from spack.directives import conflicts, depends_on
|
||||
from spack.operating_systems.mac_os import macos_version
|
||||
from spack.package import PackageBase, run_after, run_before
|
||||
from spack.util.executable import Executable
|
||||
@@ -76,7 +76,7 @@ def patch_config_files(self):
|
||||
or self.spec.satisfies('target=riscv64:'))
|
||||
|
||||
#: Whether or not to update ``libtool``
|
||||
#: (currently only for Arm/Clang/Fujitsu compilers)
|
||||
#: (currently only for Arm/Clang/Fujitsu/NVHPC compilers)
|
||||
patch_libtool = True
|
||||
|
||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
|
||||
@@ -104,6 +104,7 @@ def patch_config_files(self):
|
||||
depends_on('gnuconfig', type='build', when='target=ppc64le:')
|
||||
depends_on('gnuconfig', type='build', when='target=aarch64:')
|
||||
depends_on('gnuconfig', type='build', when='target=riscv64:')
|
||||
conflicts('platform=windows')
|
||||
|
||||
@property
|
||||
def _removed_la_files_log(self):
|
||||
@@ -251,7 +252,7 @@ def _set_autotools_environment_variables(self):
|
||||
def _do_patch_libtool(self):
|
||||
"""If configure generates a "libtool" script that does not correctly
|
||||
detect the compiler (and patch_libtool is set), patch in the correct
|
||||
flags for the Arm, Clang/Flang, and Fujitsu compilers."""
|
||||
flags for the Arm, Clang/Flang, Fujitsu and NVHPC compilers."""
|
||||
|
||||
# Exit early if we are required not to patch libtool
|
||||
if not self.patch_libtool:
|
||||
@@ -262,9 +263,12 @@ def _do_patch_libtool(self):
|
||||
self._patch_libtool(libtool_path)
|
||||
|
||||
def _patch_libtool(self, libtool_path):
|
||||
if self.spec.satisfies('%arm')\
|
||||
or self.spec.satisfies('%clang')\
|
||||
or self.spec.satisfies('%fj'):
|
||||
if (
|
||||
self.spec.satisfies('%arm') or
|
||||
self.spec.satisfies('%clang') or
|
||||
self.spec.satisfies('%fj') or
|
||||
self.spec.satisfies('%nvhpc')
|
||||
):
|
||||
fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
|
||||
fs.filter_file('pic_flag=""\n',
|
||||
'pic_flag="{0}"\n'
|
||||
|
@@ -8,7 +8,8 @@
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
from typing import List # novm
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
import six
|
||||
|
||||
@@ -18,6 +19,7 @@
|
||||
import spack.build_environment
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.package import InstallError, PackageBase, run_after
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
# Regex to extract the primary generator from the CMake generator
|
||||
# string.
|
||||
@@ -91,7 +93,12 @@ class CMakePackage(PackageBase):
|
||||
#:
|
||||
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
#: for more information.
|
||||
generator = 'Unix Makefiles'
|
||||
|
||||
generator = "Unix Makefiles"
|
||||
|
||||
if sys.platform == 'win32':
|
||||
generator = "Ninja"
|
||||
depends_on('ninja')
|
||||
|
||||
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
@@ -138,10 +145,11 @@ def std_cmake_args(self):
|
||||
@staticmethod
|
||||
def _std_args(pkg):
|
||||
"""Computes the standard cmake arguments for a generic package"""
|
||||
|
||||
try:
|
||||
generator = pkg.generator
|
||||
except AttributeError:
|
||||
generator = 'Unix Makefiles'
|
||||
generator = CMakePackage.generator
|
||||
|
||||
# Make sure a valid generator was chosen
|
||||
valid_primary_generators = ['Unix Makefiles', 'Ninja']
|
||||
@@ -166,7 +174,7 @@ def _std_args(pkg):
|
||||
define = CMakePackage.define
|
||||
args = [
|
||||
'-G', generator,
|
||||
define('CMAKE_INSTALL_PREFIX', pkg.prefix),
|
||||
define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
|
||||
define('CMAKE_BUILD_TYPE', build_type),
|
||||
]
|
||||
|
||||
@@ -185,7 +193,7 @@ def _std_args(pkg):
|
||||
|
||||
# Set up CMake rpath
|
||||
args.extend([
|
||||
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
|
||||
define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', True),
|
||||
define('CMAKE_INSTALL_RPATH',
|
||||
spack.build_environment.get_rpaths(pkg)),
|
||||
define('CMAKE_PREFIX_PATH',
|
||||
|
@@ -188,7 +188,3 @@ def cuda_flags(arch_list):
|
||||
# Darwin.
|
||||
# TODO: add missing conflicts for %apple-clang cuda@:10
|
||||
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
|
||||
|
||||
# Make sure cuda_arch can not be used without +cuda
|
||||
for value in cuda_arch_values:
|
||||
conflicts('~cuda', when='cuda_arch=' + value)
|
||||
|
@@ -686,15 +686,15 @@ def openmp_libs(self):
|
||||
# packages.yaml), specificially to provide the 'iomp5' libs.
|
||||
|
||||
elif '%gcc' in self.spec:
|
||||
gcc = Executable(self.compiler.cc)
|
||||
omp_lib_path = gcc(
|
||||
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
|
||||
with self.compiler.compiler_environment():
|
||||
omp_lib_path = Executable(self.compiler.cc)(
|
||||
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
|
||||
omp_libs = LibraryList(omp_lib_path.strip())
|
||||
|
||||
elif '%clang' in self.spec:
|
||||
clang = Executable(self.compiler.cc)
|
||||
omp_lib_path = clang(
|
||||
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
|
||||
with self.compiler.compiler_environment():
|
||||
omp_lib_path = Executable(self.compiler.cc)(
|
||||
'--print-file-name', 'libomp.%s' % dso_suffix, output=str)
|
||||
omp_libs = LibraryList(omp_lib_path.strip())
|
||||
|
||||
if len(omp_libs) < 1:
|
||||
@@ -735,8 +735,9 @@ def tbb_libs(self):
|
||||
|
||||
# TODO: clang(?)
|
||||
gcc = self._gcc_executable # must be gcc, not self.compiler.cc
|
||||
cxx_lib_path = gcc(
|
||||
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
|
||||
with self.compiler.compiler_environment():
|
||||
cxx_lib_path = gcc(
|
||||
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
|
||||
|
||||
libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
|
||||
debug_print(libs)
|
||||
@@ -746,8 +747,9 @@ def tbb_libs(self):
|
||||
def _tbb_abi(self):
|
||||
'''Select the ABI needed for linking TBB'''
|
||||
gcc = self._gcc_executable
|
||||
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
|
||||
gcc('--version', output=str), re.I | re.M)
|
||||
with self.compiler.compiler_environment():
|
||||
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
|
||||
gcc('--version', output=str), re.I | re.M)
|
||||
abi = ''
|
||||
if sys.platform == 'darwin':
|
||||
pass
|
||||
|
@@ -10,6 +10,7 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
from spack.directives import conflicts
|
||||
from spack.package import PackageBase, run_after
|
||||
|
||||
|
||||
@@ -55,6 +56,7 @@ class MakefilePackage(PackageBase):
|
||||
#: phase
|
||||
install_targets = ['install']
|
||||
|
||||
conflicts('platform=windows')
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ['check']
|
||||
|
||||
|
@@ -11,7 +11,7 @@
|
||||
from llnl.util.filesystem import (
|
||||
filter_file,
|
||||
find,
|
||||
get_filetype,
|
||||
is_nonsymlink_exe_with_shebang,
|
||||
path_contains_subdirectory,
|
||||
same_path,
|
||||
working_dir,
|
||||
@@ -216,7 +216,7 @@ def view_file_conflicts(self, view, merge_map):
|
||||
|
||||
return conflicts
|
||||
|
||||
def add_files_to_view(self, view, merge_map):
|
||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
bin_dir = self.spec.prefix.bin
|
||||
python_prefix = self.extendee_spec.prefix
|
||||
python_is_external = self.extendee_spec.external
|
||||
@@ -230,7 +230,7 @@ def add_files_to_view(self, view, merge_map):
|
||||
view.link(src, dst)
|
||||
elif not os.path.islink(src):
|
||||
shutil.copy2(src, dst)
|
||||
is_script = 'script' in get_filetype(src)
|
||||
is_script = is_nonsymlink_exe_with_shebang(src)
|
||||
if is_script and not python_is_external:
|
||||
filter_file(
|
||||
python_prefix, os.path.abspath(
|
||||
|
70
lib/spack/spack/build_systems/racket.py
Normal file
70
lib/spack/spack/build_systems/racket.py
Normal file
@@ -0,0 +1,70 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
|
||||
from spack.directives import extends
|
||||
from spack.package import PackageBase
|
||||
from spack.util.environment import env_flag
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
|
||||
|
||||
class RacketPackage(PackageBase):
|
||||
"""Specialized class for packages that are built using Racket's
|
||||
`raco pkg install` and `raco setup` commands.
|
||||
|
||||
This class provides the following phases that can be overridden:
|
||||
|
||||
* install
|
||||
* setup
|
||||
"""
|
||||
#: Package name, version, and extension on PyPI
|
||||
maintainers = ['elfprince13']
|
||||
|
||||
# Default phases
|
||||
phases = ['install']
|
||||
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
build_system_class = 'RacketPackage'
|
||||
|
||||
extends('racket')
|
||||
|
||||
pkgs = False
|
||||
subdirectory = None
|
||||
name = None
|
||||
parallel = True
|
||||
|
||||
@property
|
||||
def homepage(self):
|
||||
if self.pkgs:
|
||||
return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
ret = os.getcwd()
|
||||
if self.subdirectory:
|
||||
ret = os.path.join(ret, self.subdirectory)
|
||||
return ret
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Install everything from build directory."""
|
||||
raco = Executable("raco")
|
||||
with working_dir(self.build_directory):
|
||||
allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
|
||||
args = ['pkg', 'install', '-t', 'dir', '-n', self.name, '--deps', 'fail',
|
||||
'--ignore-implies', '--copy', '-i', '-j',
|
||||
str(determine_number_of_jobs(allow_parallel)),
|
||||
'--', os.getcwd()]
|
||||
try:
|
||||
raco(*args)
|
||||
except ProcessError:
|
||||
args.insert(-2, "--skip-installed")
|
||||
raco(*args)
|
||||
tty.warn(("Racket package {0} was already installed, uninstalling via "
|
||||
"Spack may make someone unhappy!").format(self.name))
|
@@ -112,10 +112,6 @@ class ROCmPackage(PackageBase):
|
||||
# need amd gpu type for rocm builds
|
||||
conflicts('amdgpu_target=none', when='+rocm')
|
||||
|
||||
# Make sure amdgpu_targets cannot be used without +rocm
|
||||
for value in amdgpu_targets:
|
||||
conflicts('~rocm', when='amdgpu_target=' + value)
|
||||
|
||||
# https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
|
||||
# It seems that hip-clang does not (yet?) accept this flag, in which case
|
||||
# we will still need to set the HCC_AMDGPU_TARGET environment flag in the
|
||||
|
@@ -8,6 +8,7 @@
|
||||
|
||||
import llnl.util.lang
|
||||
from llnl.util.filesystem import mkdirp
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.config
|
||||
import spack.error
|
||||
@@ -85,7 +86,7 @@ def symlink(self, mirror_ref):
|
||||
# to https://github.com/spack/spack/pull/13908)
|
||||
os.unlink(cosmetic_path)
|
||||
mkdirp(os.path.dirname(cosmetic_path))
|
||||
os.symlink(relative_dst, cosmetic_path)
|
||||
symlink(relative_dst, cosmetic_path)
|
||||
|
||||
|
||||
#: Spack's local cache for downloaded source archives
|
||||
|
@@ -5,7 +5,6 @@
|
||||
|
||||
import base64
|
||||
import copy
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
@@ -24,7 +23,6 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.cmd
|
||||
import spack.compilers as compilers
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
@@ -514,6 +512,82 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
return needs_list
|
||||
|
||||
|
||||
def get_change_revisions():
|
||||
"""If this is a git repo get the revisions to use when checking
|
||||
for changed packages and spack core modules."""
|
||||
git_dir = os.path.join(spack.paths.prefix, '.git')
|
||||
if os.path.exists(git_dir) and os.path.isdir(git_dir):
|
||||
# TODO: This will only find changed packages from the last
|
||||
# TODO: commit. While this may work for single merge commits
|
||||
# TODO: when merging the topic branch into the base, it will
|
||||
# TODO: require more thought outside of that narrow case.
|
||||
return 'HEAD^', 'HEAD'
|
||||
return None, None
|
||||
|
||||
|
||||
def get_stack_changed(env_path, rev1='HEAD^', rev2='HEAD'):
|
||||
"""Given an environment manifest path and two revisions to compare, return
|
||||
whether or not the stack was changed. Returns True if the environment
|
||||
manifest changed between the provided revisions (or additionally if the
|
||||
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
|
||||
git = exe.which("git")
|
||||
if git:
|
||||
with fs.working_dir(spack.paths.prefix):
|
||||
git_log = git("diff", "--name-only", rev1, rev2,
|
||||
output=str, error=os.devnull,
|
||||
fail_on_error=False).strip()
|
||||
lines = [] if not git_log else re.split(r'\s+', git_log)
|
||||
|
||||
for path in lines:
|
||||
if '.gitlab-ci.yml' in path or path in env_path:
|
||||
tty.debug('env represented by {0} changed'.format(
|
||||
env_path))
|
||||
tty.debug('touched file: {0}'.format(path))
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def compute_affected_packages(rev1='HEAD^', rev2='HEAD'):
|
||||
"""Determine which packages were added, removed or changed
|
||||
between rev1 and rev2, and return the names as a set"""
|
||||
return spack.repo.get_all_package_diffs('ARC', rev1=rev1, rev2=rev2)
|
||||
|
||||
|
||||
def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True):
|
||||
"""Given a list of package names, and assuming an active and
|
||||
concretized environment, return a set of concrete specs from
|
||||
the environment corresponding to any of the affected pkgs (or
|
||||
optionally to any of their dependencies/dependents).
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Active concrete environment
|
||||
affected_pkgs (List[str]): Affected package names
|
||||
dependencies (bool): Include dependencies of affected packages
|
||||
dependents (bool): Include dependents of affected pacakges
|
||||
|
||||
Returns:
|
||||
|
||||
A list of concrete specs from the active environment including
|
||||
those associated with affected packages, and possible their
|
||||
dependencies and dependents as well.
|
||||
"""
|
||||
affected_specs = set()
|
||||
all_concrete_specs = env.all_specs()
|
||||
tty.debug('All concrete environment specs:')
|
||||
for s in all_concrete_specs:
|
||||
tty.debug(' {0}/{1}'.format(s.name, s.dag_hash()[:7]))
|
||||
for pkg in affected_pkgs:
|
||||
env_matches = [s for s in all_concrete_specs if s.name == pkg]
|
||||
for match in env_matches:
|
||||
affected_specs.add(match)
|
||||
if dependencies:
|
||||
affected_specs.update(match.traverse(direction='children', root=False))
|
||||
if dependents:
|
||||
affected_specs.update(match.traverse(direction='parents', root=False))
|
||||
return affected_specs
|
||||
|
||||
|
||||
def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
prune_dag=False, check_index_only=False,
|
||||
run_optimizer=False, use_dependencies=False,
|
||||
@@ -546,6 +620,26 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
tty.verbose("Using CDash auth token from environment")
|
||||
cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')
|
||||
|
||||
prune_untouched_packages = os.environ.get('SPACK_PRUNE_UNTOUCHED', None)
|
||||
if prune_untouched_packages:
|
||||
# Requested to prune untouched packages, but assume we won't do that
|
||||
# unless we're actually in a git repo.
|
||||
prune_untouched_packages = False
|
||||
rev1, rev2 = get_change_revisions()
|
||||
tty.debug('Got following revisions: rev1={0}, rev2={1}'.format(rev1, rev2))
|
||||
if rev1 and rev2:
|
||||
# If the stack file itself did not change, proceed with pruning
|
||||
if not get_stack_changed(env.manifest_path, rev1, rev2):
|
||||
prune_untouched_packages = True
|
||||
affected_pkgs = compute_affected_packages(rev1, rev2)
|
||||
tty.debug('affected pkgs:')
|
||||
for p in affected_pkgs:
|
||||
tty.debug(' {0}'.format(p))
|
||||
affected_specs = get_spec_filter_list(env, affected_pkgs)
|
||||
tty.debug('all affected specs:')
|
||||
for s in affected_specs:
|
||||
tty.debug(' {0}'.format(s.name))
|
||||
|
||||
generate_job_name = os.environ.get('CI_JOB_NAME', None)
|
||||
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
|
||||
|
||||
@@ -742,6 +836,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
release_spec_build_hash = release_spec.build_hash()
|
||||
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
tty.debug('Pruning {0}, untouched by change.'.format(
|
||||
release_spec.name))
|
||||
spec_record['needs_rebuild'] = False
|
||||
continue
|
||||
|
||||
runner_attribs = find_matching_config(
|
||||
release_spec, gitlab_ci)
|
||||
|
||||
@@ -903,7 +1004,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
tty.debug(debug_msg)
|
||||
|
||||
if prune_dag and not rebuild_spec:
|
||||
tty.debug('Pruning spec that does not need to be rebuilt.')
|
||||
tty.debug('Pruning {0}, does not need rebuild.'.format(
|
||||
release_spec.name))
|
||||
continue
|
||||
|
||||
if (broken_spec_urls is not None and
|
||||
@@ -923,16 +1025,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
cdash_build_name = get_cdash_build_name(
|
||||
release_spec, build_group)
|
||||
all_job_names.append(cdash_build_name)
|
||||
|
||||
related_builds = [] # Used for relating CDash builds
|
||||
if spec_label in dependencies:
|
||||
related_builds = (
|
||||
[spec_labels[d]['spec'].name
|
||||
for d in dependencies[spec_label]])
|
||||
|
||||
job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
|
||||
job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
|
||||
sorted(related_builds))
|
||||
|
||||
variables.update(job_vars)
|
||||
|
||||
@@ -947,7 +1040,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
local_mirror_dir, 'build_cache')
|
||||
artifact_paths.extend([os.path.join(bc_root, p) for p in [
|
||||
bindist.tarball_name(release_spec, '.spec.json'),
|
||||
bindist.tarball_name(release_spec, '.cdashid'),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]])
|
||||
|
||||
@@ -1237,11 +1329,9 @@ def configure_compilers(compiler_action, scope=None):
|
||||
return None
|
||||
|
||||
|
||||
def get_concrete_specs(env, root_spec, job_name, related_builds,
|
||||
compiler_action):
|
||||
def get_concrete_specs(env, root_spec, job_name, compiler_action):
|
||||
spec_map = {
|
||||
'root': None,
|
||||
'deps': {},
|
||||
}
|
||||
|
||||
if compiler_action == 'FIND_ANY':
|
||||
@@ -1265,161 +1355,9 @@ def get_concrete_specs(env, root_spec, job_name, related_builds,
|
||||
spec_map['root'] = concrete_root
|
||||
spec_map[job_name] = concrete_root[job_name]
|
||||
|
||||
if related_builds:
|
||||
for dep_job_name in related_builds.split(';'):
|
||||
spec_map['deps'][dep_job_name] = concrete_root[dep_job_name]
|
||||
|
||||
return spec_map
|
||||
|
||||
|
||||
def register_cdash_build(build_name, base_url, project, site, track):
|
||||
url = base_url + '/api/v1/addBuild.php'
|
||||
time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
|
||||
build_id = None
|
||||
build_stamp = '{0}-{1}'.format(time_stamp, track)
|
||||
payload = {
|
||||
"project": project,
|
||||
"site": site,
|
||||
"name": build_name,
|
||||
"stamp": build_stamp,
|
||||
}
|
||||
|
||||
tty.debug('Registering cdash build to {0}, payload:'.format(url))
|
||||
tty.debug(payload)
|
||||
|
||||
enc_data = json.dumps(payload).encode('utf-8')
|
||||
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
|
||||
try:
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200 and response_code != 201:
|
||||
msg = 'Adding build failed (response code = {0}'.format(response_code)
|
||||
tty.warn(msg)
|
||||
return (None, None)
|
||||
|
||||
response_text = response.read()
|
||||
response_json = json.loads(response_text)
|
||||
build_id = response_json['buildid']
|
||||
except Exception as e:
|
||||
print("Registering build in CDash failed: {0}".format(e))
|
||||
|
||||
return (build_id, build_stamp)
|
||||
|
||||
|
||||
def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
|
||||
cdashids_mirror_urls):
|
||||
if not job_build_id:
|
||||
return
|
||||
|
||||
dep_map = spec_map['deps']
|
||||
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
|
||||
cdash_api_url = '{0}/api/v1/relateBuilds.php'.format(cdash_base_url)
|
||||
|
||||
for dep_pkg_name in dep_map:
|
||||
tty.debug('Fetching cdashid file for {0}'.format(dep_pkg_name))
|
||||
dep_spec = dep_map[dep_pkg_name]
|
||||
dep_build_id = None
|
||||
|
||||
for url in cdashids_mirror_urls:
|
||||
try:
|
||||
if url:
|
||||
dep_build_id = read_cdashid_from_mirror(dep_spec, url)
|
||||
break
|
||||
except web_util.SpackWebError:
|
||||
tty.debug('Did not find cdashid for {0} on {1}'.format(
|
||||
dep_pkg_name, url))
|
||||
else:
|
||||
tty.warn('Did not find cdashid for {0} anywhere'.format(
|
||||
dep_pkg_name))
|
||||
return
|
||||
|
||||
payload = {
|
||||
"project": cdash_project,
|
||||
"buildid": job_build_id,
|
||||
"relatedid": dep_build_id,
|
||||
"relationship": "depends on"
|
||||
}
|
||||
|
||||
enc_data = json.dumps(payload).encode('utf-8')
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
request = Request(cdash_api_url, data=enc_data, headers=headers)
|
||||
|
||||
try:
|
||||
response = opener.open(request)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200 and response_code != 201:
|
||||
msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
|
||||
job_build_id, dep_build_id, response_code)
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
response_text = response.read()
|
||||
tty.debug('Relate builds response: {0}'.format(response_text))
|
||||
except Exception as e:
|
||||
print("Relating builds in CDash failed: {0}".format(e))
|
||||
|
||||
|
||||
def write_cdashid_to_mirror(cdashid, spec, mirror_url):
|
||||
if not spec.concrete:
|
||||
tty.die('Can only write cdashid for concrete spec to mirror')
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
local_cdash_path = os.path.join(tmpdir, 'job.cdashid')
|
||||
with open(local_cdash_path, 'w') as fd:
|
||||
fd.write(cdashid)
|
||||
|
||||
buildcache_name = bindist.tarball_name(spec, '')
|
||||
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
|
||||
remote_url = os.path.join(
|
||||
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
|
||||
|
||||
tty.debug('pushing cdashid to url')
|
||||
tty.debug(' local file path: {0}'.format(local_cdash_path))
|
||||
tty.debug(' remote url: {0}'.format(remote_url))
|
||||
|
||||
try:
|
||||
web_util.push_to_url(local_cdash_path, remote_url)
|
||||
except Exception as inst:
|
||||
# No matter what went wrong here, don't allow the pipeline to fail
|
||||
# just because there was an issue storing the cdashid on the mirror
|
||||
msg = 'Failed to write cdashid {0} to mirror {1}'.format(
|
||||
cdashid, mirror_url)
|
||||
tty.warn(inst)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def read_cdashid_from_mirror(spec, mirror_url):
|
||||
if not spec.concrete:
|
||||
tty.die('Can only read cdashid for concrete spec from mirror')
|
||||
|
||||
buildcache_name = bindist.tarball_name(spec, '')
|
||||
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
|
||||
url = os.path.join(
|
||||
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
|
||||
|
||||
resp_url, resp_headers, response = web_util.read_from_url(url)
|
||||
contents = response.fp.read()
|
||||
|
||||
return int(contents)
|
||||
|
||||
|
||||
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
||||
"""Unchecked version of the public API, for easier mocking"""
|
||||
unsigned = not sign_binaries
|
||||
@@ -1570,7 +1508,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
|
||||
|
||||
# Next attempt to clone your local spack repo into the repro dir
|
||||
with fs.working_dir(repro_dir):
|
||||
clone_out = git("clone", spack_git_path,
|
||||
clone_out = git("clone", spack_git_path, "spack",
|
||||
output=str, error=os.devnull,
|
||||
fail_on_error=False)
|
||||
|
||||
|
@@ -308,8 +308,7 @@ def optimizer(yaml):
|
||||
# try factoring out commonly repeated portions
|
||||
common_job = {
|
||||
'variables': {
|
||||
'SPACK_COMPILER_ACTION': 'NONE',
|
||||
'SPACK_RELATED_BUILDS_CDASH': ''
|
||||
'SPACK_COMPILER_ACTION': 'NONE'
|
||||
},
|
||||
|
||||
'after_script': ['rm -rf "./spack"'],
|
||||
|
@@ -26,6 +26,10 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def activate(parser, args):
|
||||
|
||||
tty.warn("spack activate is deprecated in favor of "
|
||||
"environments and will be removed in v0.19.0")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.spec)
|
||||
if len(specs) != 1:
|
||||
tty.die("activate requires one spec. %d given." % len(specs))
|
||||
|
@@ -180,9 +180,6 @@ def setup_parser(subparser):
|
||||
download.add_argument(
|
||||
'-p', '--path', default=None,
|
||||
help="Path to directory where tarball should be downloaded")
|
||||
download.add_argument(
|
||||
'-c', '--require-cdashid', action='store_true', default=False,
|
||||
help="Require .cdashid file to be downloaded with buildcache entry")
|
||||
download.set_defaults(func=download_fn)
|
||||
|
||||
# Get buildcache name
|
||||
@@ -394,11 +391,17 @@ def preview_fn(args):
|
||||
constraints = spack.cmd.parse_specs(args.specs)
|
||||
specs = spack.store.find(constraints, multiple=True)
|
||||
|
||||
def status_fn(spec):
|
||||
if spack.relocate.is_relocatable(spec):
|
||||
return spec.install_stati.installed
|
||||
else:
|
||||
return spec.install_stati.unknown
|
||||
|
||||
# Cycle over the specs that match
|
||||
for spec in specs:
|
||||
print("Relocatable nodes")
|
||||
print("--------------------------------")
|
||||
print(spec.tree(status_fn=spack.relocate.is_relocatable))
|
||||
print(spec.tree(status_fn=status_fn))
|
||||
|
||||
|
||||
def check_fn(args):
|
||||
@@ -440,9 +443,7 @@ def download_fn(args):
|
||||
"""Download buildcache entry from a remote mirror to local folder. This
|
||||
command uses the process exit code to indicate its result, specifically,
|
||||
a non-zero exit code indicates that the command failed to download at
|
||||
least one of the required buildcache components. Normally, just the
|
||||
tarball and .spec.json files are required, but if the --require-cdashid
|
||||
argument was provided, then a .cdashid file is also required."""
|
||||
least one of the required buildcache components."""
|
||||
if not args.spec and not args.spec_file:
|
||||
tty.msg('No specs provided, exiting.')
|
||||
sys.exit(0)
|
||||
@@ -452,9 +453,7 @@ def download_fn(args):
|
||||
sys.exit(0)
|
||||
|
||||
spec = _concrete_spec_from_args(args)
|
||||
result = bindist.download_single_spec(
|
||||
spec, args.path, require_cdashid=args.require_cdashid
|
||||
)
|
||||
result = bindist.download_single_spec(spec, args.path)
|
||||
|
||||
if not result:
|
||||
sys.exit(1)
|
||||
@@ -560,11 +559,6 @@ def copy_fn(args):
|
||||
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
|
||||
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
|
||||
|
||||
cdashidfile_rel_path = os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
|
||||
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
|
||||
cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)
|
||||
|
||||
# Make sure directory structure exists before attempting to copy
|
||||
os.makedirs(os.path.dirname(tarball_dest_path))
|
||||
|
||||
@@ -578,11 +572,6 @@ def copy_fn(args):
|
||||
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
|
||||
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
|
||||
|
||||
# Copy the cdashid file (if exists) to the destination mirror
|
||||
if os.path.exists(cdashid_src_path):
|
||||
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
|
||||
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
|
||||
|
||||
|
||||
def sync_fn(args):
|
||||
""" Syncs binaries (and associated metadata) from one mirror to another.
|
||||
@@ -667,8 +656,6 @@ def sync_fn(args):
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
|
||||
os.path.join(
|
||||
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
|
||||
])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
@@ -57,32 +57,32 @@ def checksum(parser, args):
|
||||
pkg = spack.repo.get(args.package)
|
||||
|
||||
url_dict = {}
|
||||
if args.versions:
|
||||
# If the user asked for specific versions, use those
|
||||
for version in args.versions:
|
||||
versions = args.versions
|
||||
if (not versions) and args.preferred:
|
||||
versions = [preferred_version(pkg)]
|
||||
|
||||
if versions:
|
||||
remote_versions = None
|
||||
for version in versions:
|
||||
version = ver(version)
|
||||
if not isinstance(version, Version):
|
||||
tty.die("Cannot generate checksums for version lists or "
|
||||
"version ranges. Use unambiguous versions.")
|
||||
url_dict[version] = pkg.url_for_version(version)
|
||||
elif args.preferred:
|
||||
version = preferred_version(pkg)
|
||||
url_dict = dict([(version, pkg.url_for_version(version))])
|
||||
url = pkg.find_valid_url_for_version(version)
|
||||
if url is not None:
|
||||
url_dict[version] = url
|
||||
continue
|
||||
# if we get here, it's because no valid url was provided by the package
|
||||
# do expensive fallback to try to recover
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
if version in remote_versions:
|
||||
url_dict[version] = remote_versions[version]
|
||||
else:
|
||||
# Otherwise, see what versions we can find online
|
||||
url_dict = pkg.fetch_remote_versions()
|
||||
if not url_dict:
|
||||
tty.die("Could not find any versions for {0}".format(pkg.name))
|
||||
|
||||
# And ensure the specified version URLs take precedence, if available
|
||||
try:
|
||||
explicit_dict = {}
|
||||
for v in pkg.versions:
|
||||
if not v.isdevelop():
|
||||
explicit_dict[v] = pkg.url_for_version(v)
|
||||
url_dict.update(explicit_dict)
|
||||
except spack.package.NoURLError:
|
||||
pass
|
||||
if not url_dict:
|
||||
tty.die("Could not find any versions for {0}".format(pkg.name))
|
||||
|
||||
version_lines = spack.stage.get_checksums_for_versions(
|
||||
url_dict, pkg.name, keep_stage=args.keep_stage,
|
||||
|
@@ -196,7 +196,6 @@ def ci_rebuild(args):
|
||||
job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
|
||||
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
|
||||
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
|
||||
related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
|
||||
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
|
||||
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
|
||||
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
|
||||
@@ -236,7 +235,6 @@ def ci_rebuild(args):
|
||||
tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
|
||||
tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
|
||||
tty.debug('cdash_site = {0}'.format(cdash_site))
|
||||
tty.debug('related_builds = {0}'.format(related_builds))
|
||||
tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))
|
||||
|
||||
# Is this a pipeline run on a spack PR or a merge to develop? It might
|
||||
@@ -279,7 +277,7 @@ def ci_rebuild(args):
|
||||
# Whatever form of root_spec we got, use it to get a map giving us concrete
|
||||
# specs for this job and all of its dependencies.
|
||||
spec_map = spack_ci.get_concrete_specs(
|
||||
env, root_spec, job_spec_pkg_name, related_builds, compiler_action)
|
||||
env, root_spec, job_spec_pkg_name, compiler_action)
|
||||
job_spec = spec_map[job_spec_pkg_name]
|
||||
|
||||
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
|
||||
@@ -362,8 +360,10 @@ def ci_rebuild(args):
|
||||
# Write information about spack into an artifact in the repro dir
|
||||
spack_info = spack_ci.get_spack_info()
|
||||
spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
|
||||
with open(spack_info_file, 'w') as fd:
|
||||
fd.write('\n{0}\n'.format(spack_info))
|
||||
with open(spack_info_file, 'wb') as fd:
|
||||
fd.write(b'\n')
|
||||
fd.write(spack_info.encode('utf8'))
|
||||
fd.write(b'\n')
|
||||
|
||||
# If we decided there should be a temporary storage mechanism, add that
|
||||
# mirror now so it's used when we check for a full hash match already
|
||||
@@ -373,9 +373,6 @@ def ci_rebuild(args):
|
||||
pipeline_mirror_url,
|
||||
cfg.default_modify_scope())
|
||||
|
||||
cdash_build_id = None
|
||||
cdash_build_stamp = None
|
||||
|
||||
# Check configured mirrors for a built spec with a matching full hash
|
||||
matches = bindist.get_mirrors_for_spec(
|
||||
job_spec, full_hash_match=True, index_only=False)
|
||||
@@ -400,7 +397,6 @@ def ci_rebuild(args):
|
||||
bindist.download_single_spec(
|
||||
job_spec,
|
||||
build_cache_dir,
|
||||
require_cdashid=False,
|
||||
mirror_url=matching_mirror
|
||||
)
|
||||
|
||||
@@ -427,16 +423,8 @@ def ci_rebuild(args):
|
||||
if not verify_binaries:
|
||||
install_args.append('--no-check-signature')
|
||||
|
||||
# If CDash reporting is enabled, we first register this build with
|
||||
# the specified CDash instance, then relate the build to those of
|
||||
# its dependencies.
|
||||
if enable_cdash:
|
||||
tty.debug('CDash: Registering build')
|
||||
(cdash_build_id,
|
||||
cdash_build_stamp) = spack_ci.register_cdash_build(
|
||||
cdash_build_name, cdash_base_url, cdash_project,
|
||||
cdash_site, job_spec_buildgroup)
|
||||
|
||||
# Add additional arguments to `spack install` for CDash reporting.
|
||||
cdash_upload_url = '{0}/submit.php?project={1}'.format(
|
||||
cdash_base_url, cdash_project_enc)
|
||||
|
||||
@@ -444,15 +432,9 @@ def ci_rebuild(args):
|
||||
'--cdash-upload-url', cdash_upload_url,
|
||||
'--cdash-build', cdash_build_name,
|
||||
'--cdash-site', cdash_site,
|
||||
'--cdash-buildstamp', cdash_build_stamp,
|
||||
'--cdash-track', job_spec_buildgroup,
|
||||
])
|
||||
|
||||
if cdash_build_id is not None:
|
||||
tty.debug('CDash: Relating build with dependency builds')
|
||||
spack_ci.relate_cdash_builds(
|
||||
spec_map, cdash_base_url, cdash_build_id, cdash_project,
|
||||
[pipeline_mirror_url, pr_mirror_url, remote_mirror_url])
|
||||
|
||||
# A compiler action of 'FIND_ANY' means we are building a bootstrap
|
||||
# compiler or one of its deps.
|
||||
# TODO: when compilers are dependencies, we should include --no-add
|
||||
@@ -560,12 +542,6 @@ def ci_rebuild(args):
|
||||
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
if cdash_build_id:
|
||||
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
|
||||
cdash_build_id, buildcache_mirror_url))
|
||||
spack_ci.write_cdashid_to_mirror(
|
||||
cdash_build_id, job_spec, buildcache_mirror_url)
|
||||
|
||||
# Create another copy of that buildcache in the per-pipeline
|
||||
# temporary storage mirror (this is only done if either
|
||||
# artifacts buildcache is enabled or a temporary storage url
|
||||
@@ -575,12 +551,6 @@ def ci_rebuild(args):
|
||||
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
if cdash_build_id:
|
||||
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
|
||||
cdash_build_id, pipeline_mirror_url))
|
||||
spack_ci.write_cdashid_to_mirror(
|
||||
cdash_build_id, job_spec, pipeline_mirror_url)
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
# on the broken-specs list. If so, remove it.
|
||||
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
|
||||
|
@@ -117,7 +117,7 @@ def format(self, cmd):
|
||||
'virtual': '_providers',
|
||||
'namespace': '_repos',
|
||||
'hash': '_all_resource_hashes',
|
||||
'pytest': '_tests',
|
||||
'pytest': '_unit_tests',
|
||||
}
|
||||
|
||||
|
||||
|
@@ -35,6 +35,9 @@ def shell_init_instructions(cmd, equivalent):
|
||||
color.colorize("@*c{For fish:}"),
|
||||
" source %s/setup-env.fish" % spack.paths.share_path,
|
||||
"",
|
||||
color.colorize("@*c{For Windows batch:}"),
|
||||
" source %s/spack_cmd.bat" % spack.paths.share_path,
|
||||
"",
|
||||
"Or, if you do not want to use shell support, run " + (
|
||||
"one of these" if shell_specific else "this") + " instead:",
|
||||
"",
|
||||
@@ -45,6 +48,7 @@ def shell_init_instructions(cmd, equivalent):
|
||||
equivalent.format(sh_arg="--sh ") + " # bash/zsh/sh",
|
||||
equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
|
||||
equivalent.format(sh_arg="--fish") + " # fish",
|
||||
equivalent.format(sh_arg="--bat ") + " # batch"
|
||||
]
|
||||
else:
|
||||
msg += [" " + equivalent]
|
||||
|
@@ -382,9 +382,14 @@ def add_concretizer_args(subparser):
|
||||
)
|
||||
subgroup.add_argument(
|
||||
'--reuse', action=ConfigSetAction, dest="concretizer:reuse",
|
||||
const=True, default=None,
|
||||
const="any", default=None,
|
||||
help='reuse installed dependencies/buildcaches when possible'
|
||||
)
|
||||
subgroup.add_argument(
|
||||
'--reuse-only', action=ConfigSetAction, dest="concretizer:reuse",
|
||||
const=True, default=None,
|
||||
help='operate as a binary package manager'
|
||||
)
|
||||
|
||||
|
||||
def add_s3_connection_args(subparser, add_help):
|
||||
|
@@ -258,6 +258,42 @@ def install(self, spec, prefix):
|
||||
bazel()"""
|
||||
|
||||
|
||||
class RacketPackageTemplate(PackageTemplate):
|
||||
"""Provides approriate overrides for Racket extensions"""
|
||||
base_class_name = 'RacketPackage'
|
||||
|
||||
url_line = """\
|
||||
# FIXME: set the proper location from which to fetch your package
|
||||
git = "git@github.com:example/example.git"
|
||||
"""
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required. Only add the racket dependency
|
||||
# if you need specific versions. A generic racket dependency is
|
||||
# added implicity by the RacketPackage class.
|
||||
# depends_on('racket@8.3:', type=('build', 'run'))"""
|
||||
|
||||
body_def = """\
|
||||
# FIXME: specify the name of the package,
|
||||
# as it should appear to ``raco pkg install``
|
||||
name = '{0}'
|
||||
# FIXME: set to true if published on pkgs.racket-lang.org
|
||||
# pkgs = False
|
||||
# FIXME: specify path to the root directory of the
|
||||
# package, if not the base directory
|
||||
# subdirectory = None
|
||||
"""
|
||||
|
||||
def __init__(self, name, url, *args, **kwargs):
|
||||
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
|
||||
if not name.startswith('rkt-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
|
||||
name = 'rkt-{0}'.format(name)
|
||||
self.body_def = self.body_def.format(name[4:])
|
||||
super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)
|
||||
|
||||
|
||||
class PythonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for python extensions"""
|
||||
base_class_name = 'PythonPackage'
|
||||
@@ -536,6 +572,7 @@ def __init__(self, name, *args, **kwargs):
|
||||
'bazel': BazelPackageTemplate,
|
||||
'python': PythonPackageTemplate,
|
||||
'r': RPackageTemplate,
|
||||
'racket': RacketPackageTemplate,
|
||||
'perlmake': PerlmakePackageTemplate,
|
||||
'perlbuild': PerlbuildPackageTemplate,
|
||||
'octave': OctavePackageTemplate,
|
||||
@@ -758,7 +795,15 @@ def get_versions(args, name):
|
||||
# Default guesser
|
||||
guesser = BuildSystemGuesser()
|
||||
|
||||
if args.url is not None and args.template != 'bundle':
|
||||
valid_url = True
|
||||
try:
|
||||
spack.util.url.require_url_format(args.url)
|
||||
if args.url.startswith('file://'):
|
||||
valid_url = False # No point in spidering these
|
||||
except ValueError:
|
||||
valid_url = False
|
||||
|
||||
if args.url is not None and args.template != 'bundle' and valid_url:
|
||||
# Find available versions
|
||||
try:
|
||||
url_dict = spack.util.web.find_versions_of_archive(args.url)
|
||||
|
@@ -8,9 +8,9 @@
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.graph
|
||||
import spack.store
|
||||
from spack.filesystem_view import YamlFilesystemView
|
||||
from spack.graph import topological_sort
|
||||
|
||||
description = "deactivate a package extension"
|
||||
section = "extensions"
|
||||
@@ -32,6 +32,10 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def deactivate(parser, args):
|
||||
|
||||
tty.warn("spack deactivate is deprecated in favor of "
|
||||
"environments and will be removed in v0.19.0")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.spec)
|
||||
if len(specs) != 1:
|
||||
tty.die("deactivate requires one spec. %d given." % len(specs))
|
||||
@@ -68,11 +72,8 @@ def deactivate(parser, args):
|
||||
tty.msg("Deactivating %s and all dependencies." %
|
||||
pkg.spec.short_spec)
|
||||
|
||||
topo_order = topological_sort(spec)
|
||||
index = spec.index()
|
||||
|
||||
for name in topo_order:
|
||||
espec = index[name]
|
||||
nodes_in_topological_order = spack.graph.topological_sort(spec)
|
||||
for espec in reversed(nodes_in_topological_order):
|
||||
epkg = espec.package
|
||||
if epkg.extends(pkg.extendee_spec):
|
||||
if epkg.is_activated(view) or args.force:
|
||||
|
@@ -19,6 +19,7 @@
|
||||
import os
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
@@ -123,7 +124,7 @@ def deprecate(parser, args):
|
||||
if not answer:
|
||||
tty.die('Will not deprecate any packages.')
|
||||
|
||||
link_fn = os.link if args.link_type == 'hard' else os.symlink
|
||||
link_fn = os.link if args.link_type == 'hard' else symlink
|
||||
|
||||
for dcate, dcator in zip(all_deprecate, all_deprecators):
|
||||
dcate.package.do_deprecate(dcator, link_fn)
|
||||
|
@@ -60,6 +60,9 @@ def env_activate_setup_parser(subparser):
|
||||
shells.add_argument(
|
||||
'--fish', action='store_const', dest='shell', const='fish',
|
||||
help="print fish commands to activate the environment")
|
||||
shells.add_argument(
|
||||
'--bat', action='store_const', dest='shell', const='bat',
|
||||
help="print bat commands to activate the environment")
|
||||
|
||||
view_options = subparser.add_mutually_exclusive_group()
|
||||
view_options.add_argument(
|
||||
@@ -173,6 +176,9 @@ def env_deactivate_setup_parser(subparser):
|
||||
shells.add_argument(
|
||||
'--fish', action='store_const', dest='shell', const='fish',
|
||||
help="print fish commands to activate the environment")
|
||||
shells.add_argument(
|
||||
'--bat', action='store_const', dest='shell', const='bat',
|
||||
help="print bat commands to activate the environment")
|
||||
|
||||
|
||||
def env_deactivate(args):
|
||||
|
@@ -91,6 +91,8 @@ def external_find(args):
|
||||
packages_to_check = spack.repo.path.all_packages()
|
||||
|
||||
detected_packages = spack.detection.by_executable(packages_to_check)
|
||||
detected_packages.update(spack.detection.by_library(packages_to_check))
|
||||
|
||||
new_entries = spack.detection.update_configuration(
|
||||
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
||||
)
|
||||
|
@@ -202,6 +202,12 @@ def display_env(env, args, decorator):
|
||||
|
||||
|
||||
def find(parser, args):
|
||||
if args.bootstrap:
|
||||
tty.warn(
|
||||
"`spack find --bootstrap` is deprecated and will be removed in v0.19.",
|
||||
"Use `spack --bootstrap find` instead."
|
||||
)
|
||||
|
||||
if args.bootstrap:
|
||||
bootstrap_store_path = spack.bootstrap.store_path()
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
|
@@ -8,6 +8,7 @@
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.mirror
|
||||
import spack.paths
|
||||
import spack.util.gpg
|
||||
|
||||
@@ -200,8 +201,13 @@ def gpg_verify(args):
|
||||
def gpg_publish(args):
|
||||
"""publish public keys to a build cache"""
|
||||
|
||||
# TODO(opadron): switch to using the mirror args once #17547 is merged
|
||||
mirror = args.directory
|
||||
mirror = None
|
||||
if args.directory:
|
||||
mirror = spack.mirror.Mirror(args.directory, args.directory)
|
||||
elif args.mirror_name:
|
||||
mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
|
||||
elif args.mirror_url:
|
||||
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
|
||||
|
||||
spack.binary_distribution.push_keys(
|
||||
mirror, keys=args.keys, regenerate_index=args.rebuild_index)
|
||||
|
@@ -5,6 +5,7 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import inspect
|
||||
import textwrap
|
||||
|
||||
from six.moves import zip_longest
|
||||
@@ -17,7 +18,7 @@
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
from spack.package import preferred_version
|
||||
from spack.package import has_test_method, preferred_version
|
||||
|
||||
description = 'get detailed information on a particular package'
|
||||
section = 'basic'
|
||||
@@ -39,6 +40,25 @@ def pad(string):
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-a', '--all', action='store_true', default=False,
|
||||
help="output all package information"
|
||||
)
|
||||
|
||||
options = [
|
||||
('--detectable', print_detectable.__doc__),
|
||||
('--maintainers', print_maintainers.__doc__),
|
||||
('--no-dependencies', 'do not ' + print_dependencies.__doc__),
|
||||
('--no-variants', 'do not ' + print_variants.__doc__),
|
||||
('--no-versions', 'do not ' + print_versions.__doc__),
|
||||
('--phases', print_phases.__doc__),
|
||||
('--tags', print_tags.__doc__),
|
||||
('--tests', print_tests.__doc__),
|
||||
('--virtuals', print_virtuals.__doc__),
|
||||
]
|
||||
for opt, help_comment in options:
|
||||
subparser.add_argument(opt, action='store_true', help=help_comment)
|
||||
|
||||
arguments.add_common_arguments(subparser, ['package'])
|
||||
|
||||
|
||||
@@ -145,27 +165,21 @@ def lines(self):
|
||||
yield " " + self.fmt % t
|
||||
|
||||
|
||||
def print_text_info(pkg):
|
||||
"""Print out a plain text description of a package."""
|
||||
def print_dependencies(pkg):
|
||||
"""output build, link, and run package dependencies"""
|
||||
|
||||
header = section_title(
|
||||
'{0}: '
|
||||
).format(pkg.build_system_class) + pkg.name
|
||||
color.cprint(header)
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Description:'))
|
||||
if pkg.__doc__:
|
||||
color.cprint(color.cescape(pkg.format_doc(indent=4)))
|
||||
else:
|
||||
color.cprint(" None")
|
||||
|
||||
color.cprint(section_title('Homepage: ') + pkg.homepage)
|
||||
|
||||
if len(pkg.maintainers) > 0:
|
||||
mnt = " ".join(['@@' + m for m in pkg.maintainers])
|
||||
for deptype in ('build', 'link', 'run'):
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Maintainers: ') + mnt)
|
||||
color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
|
||||
deps = sorted(pkg.dependencies_of_type(deptype))
|
||||
if deps:
|
||||
colify(deps, indent=4)
|
||||
else:
|
||||
color.cprint(' None')
|
||||
|
||||
|
||||
def print_detectable(pkg):
|
||||
"""output information on external detection"""
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Externally Detectable: '))
|
||||
@@ -187,6 +201,31 @@ def print_text_info(pkg):
|
||||
else:
|
||||
color.cprint(' False')
|
||||
|
||||
|
||||
def print_maintainers(pkg):
|
||||
"""output package maintainers"""
|
||||
|
||||
if len(pkg.maintainers) > 0:
|
||||
mnt = " ".join(['@@' + m for m in pkg.maintainers])
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Maintainers: ') + mnt)
|
||||
|
||||
|
||||
def print_phases(pkg):
|
||||
"""output installation phases"""
|
||||
|
||||
if hasattr(pkg, 'phases') and pkg.phases:
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Installation Phases:'))
|
||||
phase_str = ''
|
||||
for phase in pkg.phases:
|
||||
phase_str += " {0}".format(phase)
|
||||
color.cprint(phase_str)
|
||||
|
||||
|
||||
def print_tags(pkg):
|
||||
"""output package tags"""
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title("Tags: "))
|
||||
if hasattr(pkg, 'tags'):
|
||||
@@ -195,6 +234,90 @@ def print_text_info(pkg):
|
||||
else:
|
||||
color.cprint(" None")
|
||||
|
||||
|
||||
def print_tests(pkg):
|
||||
"""output relevant build-time and stand-alone tests"""
|
||||
|
||||
# Some built-in base packages (e.g., Autotools) define callback (e.g.,
|
||||
# check) inherited by descendant packages. These checks may not result
|
||||
# in build-time testing if the package's build does not implement the
|
||||
# expected functionality (e.g., a 'check' or 'test' targets).
|
||||
#
|
||||
# So the presence of a callback in Spack does not necessarily correspond
|
||||
# to the actual presence of built-time tests for a package.
|
||||
for callbacks, phase in [(pkg.build_time_test_callbacks, 'Build'),
|
||||
(pkg.install_time_test_callbacks, 'Install')]:
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Available {0} Phase Test Methods:'
|
||||
.format(phase)))
|
||||
names = []
|
||||
if callbacks:
|
||||
for name in callbacks:
|
||||
if getattr(pkg, name, False):
|
||||
names.append(name)
|
||||
|
||||
if names:
|
||||
colify(sorted(names), indent=4)
|
||||
else:
|
||||
color.cprint(' None')
|
||||
|
||||
# PackageBase defines an empty install/smoke test but we want to know
|
||||
# if it has been overridden and, therefore, assumed to be implemented.
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Stand-Alone/Smoke Test Methods:'))
|
||||
names = []
|
||||
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
|
||||
if has_test_method(pkg_cls):
|
||||
pkg_base = spack.package.PackageBase
|
||||
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
|
||||
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
|
||||
test_pkgs = list(set(test_pkgs))
|
||||
names.extend([(test.split()[1]).lower() for test in test_pkgs])
|
||||
|
||||
# TODO Refactor START
|
||||
# Use code from package.py's test_process IF this functionality is
|
||||
# accepted.
|
||||
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
|
||||
|
||||
# hack for compilers that are not dependencies (yet)
|
||||
# TODO: this all eventually goes away
|
||||
c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
|
||||
if pkg.name in c_names:
|
||||
v_names.extend(['c', 'cxx', 'fortran'])
|
||||
if pkg.spec.satisfies('llvm+clang'):
|
||||
v_names.extend(['c', 'cxx'])
|
||||
# TODO Refactor END
|
||||
|
||||
v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
|
||||
for v_spec in v_specs:
|
||||
try:
|
||||
pkg = v_spec.package
|
||||
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
|
||||
if has_test_method(pkg_cls):
|
||||
names.append('{0}.test'.format(pkg.name.lower()))
|
||||
except spack.repo.UnknownPackageError:
|
||||
pass
|
||||
|
||||
if names:
|
||||
colify(sorted(names), indent=4)
|
||||
else:
|
||||
color.cprint(' None')
|
||||
|
||||
|
||||
def print_variants(pkg):
|
||||
"""output variants"""
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Variants:'))
|
||||
|
||||
formatter = VariantFormatter(pkg.variants)
|
||||
for line in formatter.lines:
|
||||
color.cprint(color.cescape(line))
|
||||
|
||||
|
||||
def print_versions(pkg):
|
||||
"""output versions"""
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Preferred version: '))
|
||||
|
||||
@@ -238,29 +361,9 @@ def print_text_info(pkg):
|
||||
line = version(' {0}'.format(pad(v))) + color.cescape(url)
|
||||
color.cprint(line)
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Variants:'))
|
||||
|
||||
formatter = VariantFormatter(pkg.variants)
|
||||
for line in formatter.lines:
|
||||
color.cprint(color.cescape(line))
|
||||
|
||||
if hasattr(pkg, 'phases') and pkg.phases:
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Installation Phases:'))
|
||||
phase_str = ''
|
||||
for phase in pkg.phases:
|
||||
phase_str += " {0}".format(phase)
|
||||
color.cprint(phase_str)
|
||||
|
||||
for deptype in ('build', 'link', 'run'):
|
||||
color.cprint('')
|
||||
color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
|
||||
deps = sorted(pkg.dependencies_of_type(deptype))
|
||||
if deps:
|
||||
colify(deps, indent=4)
|
||||
else:
|
||||
color.cprint(' None')
|
||||
def print_virtuals(pkg):
|
||||
"""output virtual packages"""
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Virtual Packages: '))
|
||||
@@ -280,9 +383,39 @@ def print_text_info(pkg):
|
||||
else:
|
||||
color.cprint(" None")
|
||||
|
||||
color.cprint('')
|
||||
|
||||
|
||||
def info(parser, args):
|
||||
pkg = spack.repo.get(args.package)
|
||||
print_text_info(pkg)
|
||||
|
||||
# Output core package information
|
||||
header = section_title(
|
||||
'{0}: '
|
||||
).format(pkg.build_system_class) + pkg.name
|
||||
color.cprint(header)
|
||||
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Description:'))
|
||||
if pkg.__doc__:
|
||||
color.cprint(color.cescape(pkg.format_doc(indent=4)))
|
||||
else:
|
||||
color.cprint(" None")
|
||||
|
||||
color.cprint(section_title('Homepage: ') + pkg.homepage)
|
||||
|
||||
# Now output optional information in expected order
|
||||
sections = [
|
||||
(args.all or args.maintainers, print_maintainers),
|
||||
(args.all or args.detectable, print_detectable),
|
||||
(args.all or args.tags, print_tags),
|
||||
(args.all or not args.no_versions, print_versions),
|
||||
(args.all or not args.no_variants, print_variants),
|
||||
(args.all or args.phases, print_phases),
|
||||
(args.all or not args.no_dependencies, print_dependencies),
|
||||
(args.all or args.virtuals, print_virtuals),
|
||||
(args.all or args.tests, print_tests),
|
||||
]
|
||||
for print_it, func in sections:
|
||||
if print_it:
|
||||
func(pkg)
|
||||
|
||||
color.cprint('')
|
||||
|
116
lib/spack/spack/cmd/installer/CMakeLists.txt
Normal file
116
lib/spack/spack/cmd/installer/CMakeLists.txt
Normal file
@@ -0,0 +1,116 @@
|
||||
cmake_minimum_required (VERSION 3.13)
|
||||
project(spack_installer NONE)
|
||||
|
||||
set(PYTHON_VERSION "3.9.0" CACHE STRING "Version of Python to build.")
|
||||
set(PY_DOWNLOAD_LINK "https://www.paraview.org/files/dependencies")
|
||||
set(PY_FILENAME "Python-${PYTHON_VERSION}-win64.tar.xz")
|
||||
set(PYTHON_DIR "Python-${PYTHON_VERSION}")
|
||||
|
||||
if (SPACK_VERSION)
|
||||
set(SPACK_DL "https://github.com/spack/spack/releases/download/v${SPACK_VERSION}")
|
||||
set(SPACK_FILENAME "spack-${SPACK_VERSION}.tar.gz")
|
||||
set(SPACK_DIR "spack-${SPACK_VERSION}")
|
||||
|
||||
# SPACK DOWLOAD AND EXTRACTION-----------------------------------
|
||||
file(DOWNLOAD "${SPACK_DL}/${SPACK_FILENAME}"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/${SPACK_FILENAME}"
|
||||
STATUS download_status
|
||||
)
|
||||
list(GET download_status 0 res)
|
||||
if(res)
|
||||
list(GET download_status 1 err)
|
||||
message(FATAL_ERROR "Failed to download ${SPACK_FILENAME} ${err}")
|
||||
endif()
|
||||
message(STATUS "Successfully downloaded ${SPACK_FILENAME}")
|
||||
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/${SPACK_FILENAME}"
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
|
||||
RESULT_VARIABLE res)
|
||||
if(NOT res EQUAL 0)
|
||||
message(FATAL_ERROR "Extraction of ${SPACK_FILENAME} failed.")
|
||||
endif()
|
||||
message(STATUS "Extracted ${SPACK_DIR}")
|
||||
SET(SPACK_SOURCE "${CMAKE_CURRENT_BINARY_DIR}/${SPACK_DIR}")
|
||||
elseif(SPACK_SOURCE)
|
||||
get_filename_component(SPACK_DIR ${SPACK_SOURCE} NAME)
|
||||
else()
|
||||
message(FATAL_ERROR "Must specify SPACK_VERSION or SPACK_SOURCE")
|
||||
endif()
|
||||
|
||||
|
||||
# GIT DOWNLOAD----------------------------------------------------
|
||||
set(GIT_FILENAME "Git-2.31.1-64-bit.exe")
|
||||
file(DOWNLOAD "https://github.com/git-for-windows/git/releases/download/v2.31.1.windows.1/Git-2.31.1-64-bit.exe"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/${GIT_FILENAME}"
|
||||
STATUS download_status
|
||||
EXPECTED_HASH "SHA256=c43611eb73ad1f17f5c8cc82ae51c3041a2e7279e0197ccf5f739e9129ce426e"
|
||||
)
|
||||
list(GET download_status 0 res)
|
||||
if(res)
|
||||
list(GET download_status 1 err)
|
||||
message(FATAL_ERROR "Failed to download ${GIT_FILENAME} ${err}")
|
||||
endif()
|
||||
message(STATUS "Successfully downloaded ${GIT_FILENAME}")
|
||||
|
||||
|
||||
# PYTHON DOWLOAD AND EXTRACTION-----------------------------------
|
||||
file(DOWNLOAD "${PY_DOWNLOAD_LINK}/${PY_FILENAME}"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/${PY_FILENAME}"
|
||||
STATUS download_status
|
||||
EXPECTED_HASH "SHA256=f6aeebc6d1ff77418678ed5612b64ce61be6bc9ef3ab9c291ac557abb1783420"
|
||||
)
|
||||
list(GET download_status 0 res)
|
||||
if(res)
|
||||
list(GET download_status 1 err)
|
||||
message(FATAL_ERROR "Failed to download ${PY_FILENAME} ${err}")
|
||||
endif()
|
||||
message(STATUS "Successfully downloaded ${PY_FILENAME}")
|
||||
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xfz
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/${PY_FILENAME}"
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
|
||||
RESULT_VARIABLE res)
|
||||
if(NOT res EQUAL 0)
|
||||
message(FATAL_ERROR "Extraction of ${PY_FILENAME} failed.")
|
||||
endif()
|
||||
message(STATUS "Extracted ${PY_FILENAME}.")
|
||||
|
||||
# license must be a .txt or .rtf file
|
||||
configure_file("${SPACK_LICENSE}" "${CMAKE_CURRENT_BINARY_DIR}/LICENSE.rtf" COPYONLY)
|
||||
|
||||
|
||||
#INSTALLATION COMMANDS---------------------------------------------------
|
||||
install(DIRECTORY "${SPACK_SOURCE}/"
|
||||
DESTINATION "${SPACK_DIR}")
|
||||
install(DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/Python-${PYTHON_VERSION}-win64/"
|
||||
DESTINATION "${PYTHON_DIR}")
|
||||
|
||||
# CPACK Installer Instructions
|
||||
set(CPACK_PACKAGE_NAME "Spack")
|
||||
set(CPACK_PACKAGE_VENDOR "Lawrence Livermore National Laboratories")
|
||||
set(CPACK_PACKAGE_VERSION "0.16.0")
|
||||
set(CPACK_PACKAGE_DESCRIPTION "A flexible package manager designed to support multiple versions, configurations, platforms, and compilers.")
|
||||
set(CPACK_PACKAGE_HOMEPAGE_URL "https://spack.io")
|
||||
set(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}")
|
||||
set(CPACK_PACKAGE_ICON "${SPACK_LOGO}")
|
||||
set(CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/README.md")
|
||||
set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_BINARY_DIR}/LICENSE.rtf")
|
||||
#set(CPACK_RESOURCE_FILE_WELCOME "${CMAKE_CURRENT_SOURCE_DIR}/NOTICE")
|
||||
# WIX options (the default)
|
||||
|
||||
set(CPACK_GENERATOR "WIX")
|
||||
set(CPACK_WIX_PRODUCT_ICON "${SPACK_LOGO}")
|
||||
set(CPACK_WIX_UI_BANNER "${CMAKE_CURRENT_SOURCE_DIR}/banner493x58.bmp")
|
||||
set(CPACK_WIX_PATCH_FILE "${CMAKE_CURRENT_SOURCE_DIR}/patch.xml")
|
||||
set(CPACK_WIX_UPGRADE_GUID "D2C703E4-721D-44EC-8016-BCB96BB64E0B")
|
||||
set(CPACK_WIX_SKIP_PROGRAM_FOLDER TRUE)
|
||||
|
||||
set(SHORTCUT_GUID "099213BC-0D37-4F29-B758-60CA2A7E6DDA")
|
||||
# Set full path to icon, shortcut in spack.wxs
|
||||
set(SPACK_SHORTCUT "spack_cmd.bat")
|
||||
configure_file("spack.wxs.in" "${CMAKE_CURRENT_BINARY_DIR}/spack.wxs")
|
||||
configure_file("bundle.wxs.in" "${CMAKE_CURRENT_BINARY_DIR}/bundle.wxs")
|
||||
set(CPACK_WIX_EXTRA_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/spack.wxs")
|
||||
|
||||
include(CPack)
|
85
lib/spack/spack/cmd/installer/README.md
Normal file
85
lib/spack/spack/cmd/installer/README.md
Normal file
@@ -0,0 +1,85 @@
|
||||
This README is a guide for creating a Spack installer for Windows using the
|
||||
``make-installer`` command. The installer is an executable file that users
|
||||
can run to install Spack like any other Windows binary.
|
||||
|
||||
Before proceeding, follow the setup instructions in Steps 1 and 2 of
|
||||
[Getting Started on Windows](https://spack.readthedocs.io/en/latest/getting_started.html#windows_support).
|
||||
|
||||
# Step 1: Install prerequisites
|
||||
|
||||
The only additional prerequisite for making the installer is Wix. Wix is a
|
||||
utility used for .msi creation and can be downloaded and installed at
|
||||
https://wixtoolset.org/releases/. The Visual Studio extensions are not
|
||||
necessary.
|
||||
|
||||
# Step 2: Make the installer
|
||||
|
||||
To use Spack, run ``spack_cmd.bat``. This will provide a Windows command
|
||||
prompt with an environment properly set up with Spack and its prerequisites.
|
||||
|
||||
Ensure that Python and CMake are on your PATH. If needed, you may add the
|
||||
CMake executable provided by Visual Studio to your path, which will look
|
||||
something like:
|
||||
|
||||
``C:\Program Files (x86)\Microsoft Visual Studio\<year>\<distribution>\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake``
|
||||
|
||||
**IMPORTANT**: If you use Tab to complete any part of this path, the console
|
||||
will automatically add quotation marks to the start and the end since it will
|
||||
see the spaces and want to parse the whole of it as a string. This is
|
||||
incorrect for our purposes so before submitting the command, ensure that the
|
||||
quotes are removed. You will encounter configuration errors if you fail to do
|
||||
this.
|
||||
|
||||
There are two ways to create the installer using Spack's ``make-installer``
|
||||
command. The recommended method is to build the installer using a local
|
||||
checkout of Spack source (release or development), using the
|
||||
`-s` flag to specify the directory where the local checkout is. For
|
||||
example, if the local checkout is in a directory called ``spack-develop``
|
||||
and want to generate an installer with the source there, you can use:
|
||||
|
||||
``spack make-installer -s spack-develop tmp``
|
||||
|
||||
Both the Spack source directory (e.g. ``spack-develop``) and installer
|
||||
destination directory (e.g. ``tmp``) may be an absolute path or relative to
|
||||
the current working directory. The entire contents of the specified
|
||||
directory will be included in the installer (e.g. .git files or local
|
||||
changes).
|
||||
|
||||
Alternatively, if you would like to create an installer from a release version
|
||||
of Spack, say, 0.16.0, and store it in ``tmp``, you can use the following
|
||||
command:
|
||||
|
||||
``spack make-installer -v 0.16.0 tmp``
|
||||
|
||||
**IMPORTANT**: Windows features are not currently supported in Spack's
|
||||
official release branches, so an installer created using this method will
|
||||
*not* run on Windows.
|
||||
|
||||
Regardless of your method, a file called ``Spack.exe`` will be created
|
||||
inside the destination directory. This executable bundles the Spack installer
|
||||
(``Spack.msi`` also located in destination directory) and the git installer.
|
||||
|
||||
# Step 3: Run the installer
|
||||
|
||||
After accepting the terms of service, select where on your computer you would
|
||||
like Spack installed, and after a few minutes Spack, Python and git will be
|
||||
installed and ready for use.
|
||||
|
||||
**IMPORTANT**: To avoid permissions issues, it is recommended to select an
|
||||
install location other than ``C:\Program Files``.
|
||||
|
||||
**IMPORTANT**: There is a specific option that must be chosen when letting Git
|
||||
install. When given the option of adjusting your ``PATH``, choose the
|
||||
``Git from the command line and also from 3rd-party software`` option. This will
|
||||
automatically update your ``PATH`` variable to include the ``git`` command.
|
||||
Certain Spack commands expect ``git`` to be part of the ``PATH``. If this step
|
||||
is not performed properly, certain Spack comands will not work.
|
||||
|
||||
If your Spack installation needs to be modified, repaired, or uninstalled,
|
||||
you can do any of these things by rerunning ``Spack.exe``.
|
||||
|
||||
Running the installer creates a shortcut on your desktop that, when
|
||||
launched, will run ``spack_cmd.bat`` and launch a console with its initial
|
||||
directory being wherever Spack was installed on your computer. If Python is
|
||||
found on your PATH, that will be used. If not, the Python included with the
|
||||
installer will be used when running Spack.
|
BIN
lib/spack/spack/cmd/installer/banner493x58.bmp
Normal file
BIN
lib/spack/spack/cmd/installer/banner493x58.bmp
Normal file
Binary file not shown.
After Width: | Height: | Size: 29 KiB |
23
lib/spack/spack/cmd/installer/bundle.wxs.in
Normal file
23
lib/spack/spack/cmd/installer/bundle.wxs.in
Normal file
@@ -0,0 +1,23 @@
|
||||
<?xml version="1.0"?>
|
||||
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi"
|
||||
xmlns:bal="http://schemas.microsoft.com/wix/BalExtension">
|
||||
<Bundle Version="1.0.0.0" UpgradeCode="63C4E213-0297-4CFE-BB7B-7A77EB68E966"
|
||||
IconSourceFile="@CPACK_WIX_PRODUCT_ICON@"
|
||||
Name="Spack Package Manager"
|
||||
Manufacturer="Lawrence Livermore National Laboratory">
|
||||
<BootstrapperApplicationRef Id="WixStandardBootstrapperApplication.RtfLicense">
|
||||
<bal:WixStandardBootstrapperApplication LicenseFile="@CPACK_RESOURCE_FILE_LICENSE@"/>
|
||||
</BootstrapperApplicationRef>
|
||||
<Chain>
|
||||
<MsiPackage
|
||||
SourceFile="Spack.msi"
|
||||
DisplayInternalUI="yes"/>
|
||||
<ExePackage
|
||||
SourceFile="Git-2.31.1-64-bit.exe"
|
||||
DetectCondition="ExeDetectedVariable"
|
||||
InstallCommand="@SPACK_GIT_VERBOSITY@ /SUPPRESSMSGBOXES"
|
||||
RepairCommand="/VERYSILENT"
|
||||
UninstallCommand="/VERYSILENT" />
|
||||
</Chain>
|
||||
</Bundle>
|
||||
</Wix>
|
10
lib/spack/spack/cmd/installer/patch.xml
Normal file
10
lib/spack/spack/cmd/installer/patch.xml
Normal file
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<CPackWiXPatch>
|
||||
<CPackWiXFragment Id ="#PRODUCT">
|
||||
<Property Id="DISABLEADVTSHORTCUTS" Value="1"/>
|
||||
</CPackWiXFragment>
|
||||
<CPackWiXFragment Id ="#PRODUCTFEATURE">
|
||||
<ComponentGroupRef Id="ProductComponents" />
|
||||
<ComponentRef Id="ProgramMenuDir"/>
|
||||
</CPackWiXFragment>
|
||||
</CPackWiXPatch>
|
50
lib/spack/spack/cmd/installer/spack.wxs.in
Normal file
50
lib/spack/spack/cmd/installer/spack.wxs.in
Normal file
@@ -0,0 +1,50 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
|
||||
<Fragment>
|
||||
<Icon Id="icon.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@"/>
|
||||
<Property Id="ARPPRODUCTICON" Value="icon.ico" />
|
||||
</Fragment>
|
||||
<Fragment>
|
||||
<DirectoryRef Id="TARGETDIR">
|
||||
<Directory Id="DesktopFolder" Name="Desktop" >
|
||||
<Component Id="SpackDesktopShortCut" Guid="@CPACK_WIX_UPGRADE_GUID@">
|
||||
<Shortcut Id="SpackDesktopShortCut"
|
||||
Name="Spack Package Manager"
|
||||
Description="Spack package manager"
|
||||
Target="[INSTALL_ROOT]/@SPACK_DIR@/bin/@SPACK_SHORTCUT@"
|
||||
Icon="icon1.ico">
|
||||
<Icon Id="icon1.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@" />
|
||||
</Shortcut>
|
||||
<RegistryValue Root="HKCU" Key="Software\LLNL\Spack"
|
||||
Type="integer" Value="1" Name="SpackDesktopShortCut" KeyPath="yes" />
|
||||
</Component>
|
||||
</Directory>
|
||||
<Directory Id="ProgramMenuFolder" Name="Programs">
|
||||
<Directory Id="ApplicationProgramsFolder" Name="Spack">
|
||||
<Component Id="SpackStartShortCut" Guid="@SHORTCUT_GUID@">
|
||||
<Shortcut Id="SpackStartMenuShortCut"
|
||||
Name="Spack Package Manager"
|
||||
Description="Spack package manager"
|
||||
Target="[INSTALL_ROOT]/@SPACK_DIR@/bin/@SPACK_SHORTCUT@"
|
||||
Icon="icon2.ico">
|
||||
<Icon Id="icon2.ico" SourceFile="@CPACK_WIX_PRODUCT_ICON@" />
|
||||
</Shortcut>
|
||||
<RegistryValue Root="HKCU" Key="Software/LLNL/Spack"
|
||||
Type="integer" Value="1" Name="SpackStartMenuShortCut" KeyPath="yes" />
|
||||
</Component>
|
||||
<Component Id="ProgramMenuDir" Guid="*">
|
||||
<RemoveFolder Id="ProgramMenuDir" On="uninstall"/>
|
||||
<RegistryValue Root="HKMU" Key="Software\LLNL\Spack"
|
||||
Type="integer" Value="1" Name="installed" KeyPath="yes" />
|
||||
</Component>
|
||||
</Directory>
|
||||
</Directory>
|
||||
</DirectoryRef>
|
||||
</Fragment>
|
||||
<Fragment>
|
||||
<ComponentGroup Id="ProductComponents">
|
||||
<ComponentRef Id="SpackStartShortCut"/>
|
||||
<ComponentRef Id="SpackDesktopShortCut"/>
|
||||
</ComponentGroup>
|
||||
</Fragment>
|
||||
</Wix>
|
@@ -33,6 +33,9 @@ def setup_parser(subparser):
|
||||
shells.add_argument(
|
||||
'--fish', action='store_const', dest='shell', const='fish',
|
||||
help="print fish commands to load the package")
|
||||
shells.add_argument(
|
||||
'--bat', action='store_const', dest='shell', const='bat',
|
||||
help="print bat commands to load the package")
|
||||
|
||||
subparser.add_argument(
|
||||
'--first',
|
||||
|
146
lib/spack/spack/cmd/make_installer.py
Normal file
146
lib/spack/spack/cmd/make_installer.py
Normal file
@@ -0,0 +1,146 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import posixpath
|
||||
import sys
|
||||
|
||||
import spack.paths
|
||||
import spack.util.executable
|
||||
from spack.spec import Spec
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
description = "generate Windows installer"
|
||||
section = "admin"
|
||||
level = "long"
|
||||
|
||||
|
||||
def txt_to_rtf(file_path):
|
||||
rtf_header = r"""{{\rtf1\ansi\deff0\nouicompat
|
||||
{{\fonttbl{{\f0\\fnil\fcharset0 Courier New;}}}}
|
||||
{{\colortbl ;\red0\green0\blue255;}}
|
||||
{{\*\generator Riched20 10.0.19041}}\viewkind4\uc1
|
||||
\f0\fs22\lang1033
|
||||
{}
|
||||
}}
|
||||
"""
|
||||
|
||||
def line_to_rtf(str):
|
||||
return str.replace("\n", "\\par")
|
||||
contents = ""
|
||||
with open(file_path, "r+") as f:
|
||||
for line in f.readlines():
|
||||
contents += line_to_rtf(line)
|
||||
return rtf_header.format(contents)
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
spack_source_group = subparser.add_mutually_exclusive_group(required=True)
|
||||
spack_source_group.add_argument(
|
||||
'-v', '--spack-version', default="",
|
||||
help='download given spack version e.g. 0.16.0')
|
||||
spack_source_group.add_argument(
|
||||
'-s', '--spack-source', default="",
|
||||
help='full path to spack source')
|
||||
|
||||
subparser.add_argument(
|
||||
'-g', '--git-installer-verbosity', default="",
|
||||
choices=set(['SILENT', 'VERYSILENT']),
|
||||
help="Level of verbosity provided by bundled Git Installer.\
|
||||
Default is fully verbose",
|
||||
required=False, action='store', dest="git_verbosity"
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
'output_dir', help="output directory")
|
||||
|
||||
|
||||
def make_installer(parser, args):
|
||||
"""
|
||||
Use CMake to generate WIX installer in newly created build directory
|
||||
"""
|
||||
if sys.platform == 'win32':
|
||||
output_dir = args.output_dir
|
||||
cmake_spec = Spec('cmake')
|
||||
cmake_spec.concretize()
|
||||
cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
|
||||
cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
|
||||
spack_source = args.spack_source
|
||||
git_verbosity = ""
|
||||
if args.git_verbosity:
|
||||
git_verbosity = "/" + args.git_verbosity
|
||||
|
||||
if spack_source:
|
||||
if not os.path.exists(spack_source):
|
||||
print("%s does not exist" % spack_source)
|
||||
return
|
||||
else:
|
||||
if not os.path.isabs(spack_source):
|
||||
spack_source = posixpath.abspath(spack_source)
|
||||
spack_source = convert_to_posix_path(spack_source)
|
||||
|
||||
spack_version = args.spack_version
|
||||
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
source_dir = os.path.join(here, "installer")
|
||||
posix_root = convert_to_posix_path(spack.paths.spack_root)
|
||||
spack_license = posixpath.join(posix_root, "LICENSE-APACHE")
|
||||
rtf_spack_license = txt_to_rtf(spack_license)
|
||||
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
|
||||
|
||||
with open(spack_license, 'w') as rtf_license:
|
||||
written = rtf_license.write(rtf_spack_license)
|
||||
if written == 0:
|
||||
raise RuntimeError("Failed to generate properly formatted license file")
|
||||
spack_logo = posixpath.join(posix_root,
|
||||
"share/spack/logo/favicon.ico")
|
||||
|
||||
try:
|
||||
spack.util.executable.Executable(cmake_path)(
|
||||
'-S', source_dir, '-B', output_dir,
|
||||
'-DSPACK_VERSION=%s' % spack_version,
|
||||
'-DSPACK_SOURCE=%s' % spack_source,
|
||||
'-DSPACK_LICENSE=%s' % spack_license,
|
||||
'-DSPACK_LOGO=%s' % spack_logo,
|
||||
'-DSPACK_GIT_VERBOSITY=%s' % git_verbosity
|
||||
)
|
||||
except spack.util.executable.ProcessError:
|
||||
print("Failed to generate installer")
|
||||
return spack.util.executable.ProcessError.returncode
|
||||
|
||||
try:
|
||||
spack.util.executable.Executable(cpack_path)(
|
||||
"--config",
|
||||
"%s/CPackConfig.cmake" % output_dir,
|
||||
"-B",
|
||||
"%s/" % output_dir)
|
||||
except spack.util.executable.ProcessError:
|
||||
print("Failed to generate installer")
|
||||
return spack.util.executable.ProcessError.returncode
|
||||
try:
|
||||
spack.util.executable.Executable(os.environ.get('WIX') + '/bin/candle.exe')(
|
||||
'-ext',
|
||||
'WixBalExtension',
|
||||
'%s/bundle.wxs' % output_dir,
|
||||
'-out',
|
||||
'%s/bundle.wixobj' % output_dir
|
||||
)
|
||||
except spack.util.executable.ProcessError:
|
||||
print("Failed to generate installer chain")
|
||||
return spack.util.executable.ProcessError.returncode
|
||||
try:
|
||||
spack.util.executable.Executable(os.environ.get('WIX') + "/bin/light.exe")(
|
||||
"-sw1134",
|
||||
"-ext",
|
||||
"WixBalExtension",
|
||||
"%s/bundle.wixobj" % output_dir,
|
||||
'-out',
|
||||
'%s/Spack.exe' % output_dir
|
||||
)
|
||||
except spack.util.executable.ProcessError:
|
||||
print("Failed to generate installer chain")
|
||||
return spack.util.executable.ProcessError.returncode
|
||||
print("Successfully generated Spack.exe in %s" % (output_dir))
|
||||
else:
|
||||
print('The make-installer command is currently only supported on Windows.')
|
@@ -5,12 +5,9 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd
|
||||
@@ -18,7 +15,6 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.package_hash as ph
|
||||
from spack.util.executable import which
|
||||
|
||||
description = "query packages associated with particular git revisions"
|
||||
section = "developer"
|
||||
@@ -82,78 +78,19 @@ def setup_parser(subparser):
|
||||
arguments.add_common_arguments(hash_parser, ['spec'])
|
||||
|
||||
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return spack.repo.path.get_repo('builtin.mock').packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.path.get_repo('builtin').packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
# Wrapper around Executable for git to set working directory for all
|
||||
# invocations.
|
||||
#
|
||||
# Not using -C as that is not supported for git < 1.8.5.
|
||||
def __init__(self):
|
||||
self._git_cmd = which('git', required=True)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
with working_dir(packages_path()):
|
||||
return self._git_cmd(*args, **kwargs)
|
||||
|
||||
|
||||
_git = None
|
||||
|
||||
|
||||
def get_git():
|
||||
"""Get a git executable that runs *within* the packages path."""
|
||||
global _git
|
||||
if _git is None:
|
||||
_git = GitExe()
|
||||
return _git
|
||||
|
||||
|
||||
def list_packages(rev):
|
||||
git = get_git()
|
||||
|
||||
# git ls-tree does not support ... merge-base syntax, so do it manually
|
||||
if rev.endswith('...'):
|
||||
ref = rev.replace('...', '')
|
||||
rev = git('merge-base', ref, 'HEAD', output=str).strip()
|
||||
|
||||
output = git('ls-tree', '--name-only', rev, output=str)
|
||||
return sorted(line for line in output.split('\n')
|
||||
if line and not line.startswith('.'))
|
||||
|
||||
|
||||
def pkg_add(args):
|
||||
"""add a package to the git stage with `git add`"""
|
||||
git = get_git()
|
||||
|
||||
for pkg_name in args.packages:
|
||||
filename = spack.repo.path.filename_for_package_name(pkg_name)
|
||||
if not os.path.isfile(filename):
|
||||
tty.die("No such package: %s. Path does not exist:" %
|
||||
pkg_name, filename)
|
||||
|
||||
git('add', filename)
|
||||
spack.repo.add_package_to_git_stage(args.packages)
|
||||
|
||||
|
||||
def pkg_list(args):
|
||||
"""list packages associated with a particular spack git revision"""
|
||||
colify(list_packages(args.rev))
|
||||
|
||||
|
||||
def diff_packages(rev1, rev2):
|
||||
p1 = set(list_packages(rev1))
|
||||
p2 = set(list_packages(rev2))
|
||||
return p1.difference(p2), p2.difference(p1)
|
||||
colify(spack.repo.list_packages(args.rev))
|
||||
|
||||
|
||||
def pkg_diff(args):
|
||||
"""compare packages available in two different git revisions"""
|
||||
u1, u2 = diff_packages(args.rev1, args.rev2)
|
||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||
|
||||
if u1:
|
||||
print("%s:" % args.rev1)
|
||||
@@ -168,45 +105,21 @@ def pkg_diff(args):
|
||||
|
||||
def pkg_removed(args):
|
||||
"""show packages removed since a commit"""
|
||||
u1, u2 = diff_packages(args.rev1, args.rev2)
|
||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||
if u1:
|
||||
colify(sorted(u1))
|
||||
|
||||
|
||||
def pkg_added(args):
|
||||
"""show packages added since a commit"""
|
||||
u1, u2 = diff_packages(args.rev1, args.rev2)
|
||||
u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
|
||||
if u2:
|
||||
colify(sorted(u2))
|
||||
|
||||
|
||||
def pkg_changed(args):
|
||||
"""show packages changed since a commit"""
|
||||
lower_type = args.type.lower()
|
||||
if not re.match('^[arc]*$', lower_type):
|
||||
tty.die("Invald change type: '%s'." % args.type,
|
||||
"Can contain only A (added), R (removed), or C (changed)")
|
||||
|
||||
removed, added = diff_packages(args.rev1, args.rev2)
|
||||
|
||||
git = get_git()
|
||||
out = git('diff', '--relative', '--name-only', args.rev1, args.rev2,
|
||||
output=str).strip()
|
||||
|
||||
lines = [] if not out else re.split(r'\s+', out)
|
||||
changed = set()
|
||||
for path in lines:
|
||||
pkg_name, _, _ = path.partition(os.sep)
|
||||
if pkg_name not in added and pkg_name not in removed:
|
||||
changed.add(pkg_name)
|
||||
|
||||
packages = set()
|
||||
if 'a' in lower_type:
|
||||
packages |= added
|
||||
if 'r' in lower_type:
|
||||
packages |= removed
|
||||
if 'c' in lower_type:
|
||||
packages |= changed
|
||||
packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
|
||||
|
||||
if packages:
|
||||
colify(sorted(packages))
|
||||
|
@@ -118,6 +118,10 @@ def python_interpreter(args):
|
||||
else:
|
||||
# Provides readline support, allowing user to use arrow keys
|
||||
console.push('import readline')
|
||||
# Provide tabcompletion
|
||||
console.push('from rlcompleter import Completer')
|
||||
console.push('readline.set_completer(Completer(locals()).complete)')
|
||||
console.push('readline.parse_and_bind("tab: complete")')
|
||||
|
||||
console.interact("Spack version %s\nPython %s, %s %s"
|
||||
% (spack.spack_version, platform.python_version(),
|
||||
|
@@ -6,6 +6,7 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@@ -142,9 +143,10 @@ def repo_list(args):
|
||||
except spack.repo.RepoError:
|
||||
continue
|
||||
|
||||
msg = "%d package repositor" % len(repos)
|
||||
msg += "y." if len(repos) == 1 else "ies."
|
||||
tty.msg(msg)
|
||||
if sys.stdout.isatty():
|
||||
msg = "%d package repositor" % len(repos)
|
||||
msg += "y." if len(repos) == 1 else "ies."
|
||||
tty.msg(msg)
|
||||
|
||||
if not repos:
|
||||
return
|
||||
|
@@ -28,6 +28,7 @@
|
||||
description = "run spack's unit tests (wrapper around pytest)"
|
||||
section = "developer"
|
||||
level = "long"
|
||||
is_windows = sys.platform == 'win32'
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -130,7 +131,7 @@ def colorize(c, prefix):
|
||||
# in the future - so this manipulation might be fragile
|
||||
if nodetype.lower() == 'function':
|
||||
name_parts.append(item)
|
||||
key_end = os.path.join(*[x[1] for x in key_parts])
|
||||
key_end = os.path.join(*key_parts[-1][1].split('/'))
|
||||
key = next(f for f in files if f.endswith(key_end))
|
||||
tests[key].add(tuple(x[1] for x in name_parts))
|
||||
elif nodetype.lower() == 'class':
|
||||
@@ -179,8 +180,11 @@ def unit_test(parser, args, unknown_args):
|
||||
|
||||
# Ensure clingo is available before switching to the
|
||||
# mock configuration used by unit tests
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
# Note: skip on windows here because for the moment,
|
||||
# clingo is wholly unsupported from bootstrap
|
||||
if not is_windows:
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
if pytest is None:
|
||||
vendored_pytest_dir = os.path.join(
|
||||
|
@@ -32,6 +32,9 @@ def setup_parser(subparser):
|
||||
shells.add_argument(
|
||||
'--fish', action='store_const', dest='shell', const='fish',
|
||||
help="print fish commands to load the package")
|
||||
shells.add_argument(
|
||||
'--bat', action='store_const', dest='shell', const='bat',
|
||||
help="print bat commands to load the package")
|
||||
|
||||
subparser.add_argument('-a', '--all', action='store_true',
|
||||
help='unload all loaded Spack packages.')
|
||||
|
@@ -23,6 +23,7 @@
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.path import system_path_filter
|
||||
|
||||
__all__ = ['Compiler']
|
||||
|
||||
@@ -37,8 +38,12 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
|
||||
version_arg (str): the argument used to extract version information
|
||||
"""
|
||||
compiler = spack.util.executable.Executable(compiler_path)
|
||||
output = compiler(
|
||||
version_arg, output=str, error=str, ignore_errors=ignore_errors)
|
||||
if version_arg:
|
||||
output = compiler(
|
||||
version_arg, output=str, error=str, ignore_errors=ignore_errors)
|
||||
else:
|
||||
output = compiler(
|
||||
output=str, error=str, ignore_errors=ignore_errors)
|
||||
return output
|
||||
|
||||
|
||||
@@ -153,6 +158,7 @@ def _parse_link_paths(string):
|
||||
return implicit_link_dirs
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def _parse_non_system_link_dirs(string):
|
||||
"""Parses link paths out of compiler debug output.
|
||||
|
||||
@@ -325,7 +331,7 @@ def accessible_exe(exe):
|
||||
|
||||
# setup environment before verifying in case we have executable names
|
||||
# instead of absolute paths
|
||||
with self._compiler_environment():
|
||||
with self.compiler_environment():
|
||||
missing = [cmp for cmp in (self.cc, self.cxx, self.f77, self.fc)
|
||||
if cmp and not accessible_exe(cmp)]
|
||||
if missing:
|
||||
@@ -407,7 +413,7 @@ def _get_compiler_link_paths(self, paths):
|
||||
compiler_exe.add_default_arg(flag)
|
||||
|
||||
output = ''
|
||||
with self._compiler_environment():
|
||||
with self.compiler_environment():
|
||||
output = str(compiler_exe(
|
||||
self.verbose_flag, fin, '-o', fout,
|
||||
output=str, error=str)) # str for py2
|
||||
@@ -523,7 +529,7 @@ def get_real_version(self):
|
||||
modifications) to enable the compiler to run properly on any platform.
|
||||
"""
|
||||
cc = spack.util.executable.Executable(self.cc)
|
||||
with self._compiler_environment():
|
||||
with self.compiler_environment():
|
||||
output = cc(self.version_argument,
|
||||
output=str, error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors))
|
||||
@@ -597,7 +603,12 @@ def __str__(self):
|
||||
str(self.operating_system)))))
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _compiler_environment(self):
|
||||
def compiler_environment(self):
|
||||
# yield immediately if no modules
|
||||
if not self.modules:
|
||||
yield
|
||||
return
|
||||
|
||||
# store environment to replace later
|
||||
backup_env = os.environ.copy()
|
||||
|
||||
|
@@ -42,7 +42,8 @@
|
||||
|
||||
_compiler_to_pkg = {
|
||||
'clang': 'llvm+clang',
|
||||
'oneapi': 'intel-oneapi-compilers'
|
||||
'oneapi': 'intel-oneapi-compilers',
|
||||
'rocmcc': 'llvm-amdgpu'
|
||||
}
|
||||
|
||||
|
||||
|
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
@@ -42,10 +43,10 @@ def opt_flags(self):
|
||||
|
||||
@property
|
||||
def link_paths(self):
|
||||
link_paths = {'cc': 'aocc/clang',
|
||||
'cxx': 'aocc/clang++',
|
||||
'f77': 'aocc/flang',
|
||||
'fc': 'aocc/flang'}
|
||||
link_paths = {'cc': os.path.join('aocc', 'clang'),
|
||||
'cxx': os.path.join('aocc', 'clang++'),
|
||||
'f77': os.path.join('aocc', 'flang'),
|
||||
'fc': os.path.join('aocc', 'flang')}
|
||||
|
||||
return link_paths
|
||||
|
||||
|
@@ -8,6 +8,7 @@
|
||||
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.compiler
|
||||
import spack.compilers.clang
|
||||
@@ -162,10 +163,10 @@ def setup_custom_environment(self, pkg, env):
|
||||
for fname in os.listdir(dev_dir):
|
||||
if fname in bins:
|
||||
os.unlink(os.path.join(dev_dir, fname))
|
||||
os.symlink(
|
||||
symlink(
|
||||
os.path.join(spack.paths.build_env_path, 'cc'),
|
||||
os.path.join(dev_dir, fname))
|
||||
|
||||
os.symlink(developer_root, xcode_link)
|
||||
symlink(developer_root, xcode_link)
|
||||
|
||||
env.set('DEVELOPER_DIR', xcode_link)
|
||||
|
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import spack.compiler
|
||||
@@ -22,10 +23,10 @@ class Arm(spack.compiler.Compiler):
|
||||
fc_names = ['armflang']
|
||||
|
||||
# Named wrapper links within lib/spack/env
|
||||
link_paths = {'cc': 'arm/armclang',
|
||||
'cxx': 'arm/armclang++',
|
||||
'f77': 'arm/armflang',
|
||||
'fc': 'arm/armflang'}
|
||||
link_paths = {'cc': os.path.join('arm', 'armclang'),
|
||||
'cxx': os.path.join('arm', 'armclang++'),
|
||||
'f77': os.path.join('arm', 'armflang'),
|
||||
'fc': os.path.join('arm', 'armflang')}
|
||||
|
||||
# The ``--version`` option seems to be the most consistent one for
|
||||
# arm compilers. Output looks like this:
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user